irgopt.c 50.2 KB
Newer Older
1
/* Copyright (C) 1998 - 2002 by Universitaet Karlsruhe
Sebastian Felis's avatar
Sebastian Felis committed
2
3
4
5
6
* All rights reserved.
*
* Author: Christian Schaefer, Goetz Lindenmaier, Sebastian Felis
*
* Optimizations for a whole ir graph, i.e., a procedure.
Christian Schäfer's avatar
Christian Schäfer committed
7
8
*/

Boris Boesler's avatar
Boris Boesler committed
9
10
/* $Id$ */

Boris Boesler's avatar
added    
Boris Boesler committed
11
12
13
14
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif

Götz Lindenmaier's avatar
Götz Lindenmaier committed
15
16
# include <assert.h>

17
# include "irprog.h"
Christian Schäfer's avatar
Christian Schäfer committed
18
# include "irgopt.h"
Götz Lindenmaier's avatar
Götz Lindenmaier committed
19
# include "irnode_t.h"
Götz Lindenmaier's avatar
Götz Lindenmaier committed
20
# include "irgraph_t.h"
21
# include "iropt_t.h"
Christian Schäfer's avatar
Christian Schäfer committed
22
23
# include "irgwalk.h"
# include "ircons.h"
Götz Lindenmaier's avatar
Götz Lindenmaier committed
24
# include "misc.h"
Götz Lindenmaier's avatar
Götz Lindenmaier committed
25
# include "irgmod.h"
26
# include "array.h"
Götz Lindenmaier's avatar
Götz Lindenmaier committed
27
# include "pset.h"
28
# include "pdeq.h"       /* Fuer code placement */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
29
# include "irouts.h"
30
31
# include "irloop.h"
# include "irbackedge_t.h"
32
33

/* Defined in iropt.c */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
34
35
pset *new_identities (void);
void  del_identities (pset *value_table);
36
void  add_identities   (pset *value_table, ir_node *node);
37

Götz Lindenmaier's avatar
Götz Lindenmaier committed
38
39
/********************************************************************/
/* apply optimizations of iropt to all nodes.                       */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
40
/********************************************************************/
Götz Lindenmaier's avatar
Götz Lindenmaier committed
41

42
/* Walker callback: clears the link field of each visited node so a
   later pass can use it to remember replacement nodes. */
static void init_link (ir_node *n, void *env) {
  set_irn_link(n, NULL);
}

46
static void
Christian Schäfer's avatar
Christian Schäfer committed
47
optimize_in_place_wrapper (ir_node *n, void *env) {
48
  int i;
Christian Schäfer's avatar
Christian Schäfer committed
49
50
  ir_node *optimized;

51
  for (i = 0; i < get_irn_arity(n); i++) {
52
    optimized = optimize_in_place_2(get_irn_n(n, i));
Christian Schäfer's avatar
Christian Schäfer committed
53
    set_irn_n(n, i, optimized);
54
55
56
57
58
  }

  if (get_irn_op(n) == op_Block) {
    optimized = optimize_in_place_2(n);
    if (optimized != n) exchange (n, optimized);
Christian Schäfer's avatar
Christian Schäfer committed
59
60
61
62
63
64
65
66
  }
}

/* Applies the local optimizations of iropt to every node of irg.
   Invalidates outs and dominator information and rebuilds the value
   table used for cse.  current_ir_graph is saved and restored. */
void
local_optimize_graph (ir_graph *irg) {
  ir_graph *saved = current_ir_graph;
  current_ir_graph = irg;

  /* Graph state: must not be under construction; global cse unpins
     the graph, and the walk invalidates outs and dominance. */
  assert(get_irg_phase_state(irg) != phase_building);
  if (get_opt_global_cse())
    set_irg_pinned(current_ir_graph, floats);
  if (get_irg_outs_state(current_ir_graph) == outs_consistent)
    set_irg_outs_inconsistent(current_ir_graph);
  if (get_irg_dom_state(current_ir_graph) == dom_consistent)
    set_irg_dom_inconsistent(current_ir_graph);

  /* Start cse from a fresh value table. */
  del_identities(irg->value_table);
  irg->value_table = new_identities();

  /* Optimize every node reachable from the end node. */
  irg_walk(irg->end, init_link, optimize_in_place_wrapper, NULL);

  current_ir_graph = saved;
}

Götz Lindenmaier's avatar
Götz Lindenmaier committed
86
87
88
/********************************************************************/
/* Routines for dead node elimination / copying garbage collection  */
/* of the obstack.                                                  */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
89
/********************************************************************/
Götz Lindenmaier's avatar
Götz Lindenmaier committed
90

Götz Lindenmaier's avatar
Götz Lindenmaier committed
91
/* Remeber the new node in the old node by using a field all nodes have. */
92
static INLINE void
Christian Schäfer's avatar
Christian Schäfer committed
93
94
set_new_node (ir_node *old, ir_node *new)
{
Götz Lindenmaier's avatar
Götz Lindenmaier committed
95
  old->link = new;
Christian Schäfer's avatar
Christian Schäfer committed
96
97
98
}

/* Get this new node, before the old node is forgotton.*/
99
/* Returns the copy of n previously remembered via set_new_node. */
static INLINE ir_node *
get_new_node (ir_node * n)
{
  return n->link;
}

Götz Lindenmaier's avatar
Götz Lindenmaier committed
105
106
/* We use the block_visited flag to mark that we have computed the
   number of useful predecessors for this block.
107
108
109
110
   Further we encode the new arity in this flag in the old blocks.
   Remembering the arity is useful, as it saves a lot of pointer
   accesses.  This function is called for all Phi and Block nodes
   in a Block. */
111
static INLINE int
Götz Lindenmaier's avatar
Götz Lindenmaier committed
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
compute_new_arity(ir_node *b) {
  int i, res;
  int irg_v, block_v;

  irg_v = get_irg_block_visited(current_ir_graph);
  block_v = get_Block_block_visited(b);
  if (block_v >= irg_v) {
    /* we computed the number of preds for this block and saved it in the
       block_v flag */
    return block_v - irg_v;
  } else {
    /* compute the number of good predecessors */
    res = get_irn_arity(b);
    for (i = 0; i < get_irn_arity(b); i++)
      if (get_irn_opcode(get_irn_n(b, i)) == iro_Bad) res--;
    /* save it in the flag. */
    set_Block_block_visited(b, irg_v + res);
    return res;
  }
}

133
/* Allocates fresh (empty) backedge arrays on the current graph's obstack
   for node kinds that carry backedge information (Block, Phi, Filter).
   Other node kinds need no action. */
static INLINE void new_backedge_info(ir_node *n) {
  switch (get_irn_opcode(n)) {
  case iro_Block:
    n->attr.block.cg_backedge = NULL;
    n->attr.block.backedge = new_backedge_arr(current_ir_graph->obst, get_irn_arity(n));
    break;
  case iro_Phi:
    n->attr.phi_backedge = new_backedge_arr(current_ir_graph->obst, get_irn_arity(n));
    break;
  case iro_Filter:
    n->attr.filter.backedge = new_backedge_arr(current_ir_graph->obst, get_irn_arity(n));
    break;
  default:
    break;
  }
}

Götz Lindenmaier's avatar
Götz Lindenmaier committed
149
/* Copies the node to the new obstack. The Ins of the new node point to
150
151
   the predecessors on the old obstack.  For block/phi nodes not all
   predecessors might be copied.  n->link points to the new node.
Götz Lindenmaier's avatar
Götz Lindenmaier committed
152
   For Phi and Block nodes the function allocates in-arrays with an arity
Götz Lindenmaier's avatar
Götz Lindenmaier committed
153
154
   only for useful predecessors.  The arity is determined by counting
   the non-bad predecessors of the block. */
155
static void
Götz Lindenmaier's avatar
Götz Lindenmaier committed
156
157
copy_node (ir_node *n, void *env) {
  ir_node *nn, *block;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
158
  int new_arity;
159

Götz Lindenmaier's avatar
Götz Lindenmaier committed
160
161
  if (get_irn_opcode(n) == iro_Block) {
    block = NULL;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
162
    new_arity = compute_new_arity(n);
163
    n->attr.block.graph_arr = NULL;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
164
165
  } else {
    block = get_nodes_Block(n);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
166
167
168
169
170
    if (get_irn_opcode(n) == iro_Phi) {
      new_arity = compute_new_arity(block);
    } else {
      new_arity = get_irn_arity(n);
    }
Götz Lindenmaier's avatar
Götz Lindenmaier committed
171
  }
Götz Lindenmaier's avatar
Götz Lindenmaier committed
172
173
  nn = new_ir_node(get_irn_dbg_info(n),
		   current_ir_graph,
Götz Lindenmaier's avatar
Götz Lindenmaier committed
174
175
176
		   block,
		   get_irn_op(n),
		   get_irn_mode(n),
Götz Lindenmaier's avatar
Götz Lindenmaier committed
177
		   new_arity,
Götz Lindenmaier's avatar
Götz Lindenmaier committed
178
		   get_irn_in(n));
Götz Lindenmaier's avatar
Götz Lindenmaier committed
179
180
181
  /* Copy the attributes.  These might point to additional data.  If this
     was allocated on the old obstack the pointers now are dangling.  This
     frees e.g. the memory of the graph_arr allocated in new_immBlock. */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
182
  copy_attrs(n, nn);
183
  new_backedge_info(nn);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
184
  set_new_node(n, nn);
185
186
187
188

  /*  printf("\n old node: "); DDMSG2(n);
      printf(" new node: "); DDMSG2(nn); */

Christian Schäfer's avatar
Christian Schäfer committed
189
190
}

Götz Lindenmaier's avatar
Götz Lindenmaier committed
191
192
/* Copies new predecessors of old node to new node remembered in link.
   Spare the Bad predecessors of Phi and Block nodes. */
193
static void
Götz Lindenmaier's avatar
Götz Lindenmaier committed
194
copy_preds (ir_node *n, void *env) {
195
  ir_node *nn, *block;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
196
  int i, j;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
197
198

  nn = get_new_node(n);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
199

200
201
202
203
  /* printf("\n old node: "); DDMSG2(n);
     printf(" new node: "); DDMSG2(nn);
     printf(" arities: old: %d, new: %d\n", get_irn_arity(n), get_irn_arity(nn)); */

Götz Lindenmaier's avatar
Götz Lindenmaier committed
204
205
206
207
208
209
  if (get_irn_opcode(n) == iro_Block) {
    /* Don't copy Bad nodes. */
    j = 0;
    for (i = 0; i < get_irn_arity(n); i++)
      if (get_irn_opcode(get_irn_n(n, i)) != iro_Bad) {
	set_irn_n (nn, j, get_new_node(get_irn_n(n, i)));
210
	//if (is_backedge(n, i)) set_backedge(nn, j);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
211
212
	j++;
      }
213
214
    /* repair the block visited flag from above misuse. Repair it in both
       graphs so that the old one can still be used. */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
215
    set_Block_block_visited(nn, 0);
216
    set_Block_block_visited(n, 0);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
217
    /* Local optimization could not merge two subsequent blocks if
218
       in array contained Bads.  Now it's possible.
219
       We don't call optimize_in_place as it requires
220
       that the fields in ir_graph are set properly. */
221
    if ((get_opt_control_flow_straightening()) &&
222
223
	(get_Block_n_cfgpreds(nn) == 1) &&
	(get_irn_op(get_Block_cfgpred(nn, 0)) == op_Jmp))
224
      exchange(nn, get_nodes_Block(get_Block_cfgpred(nn, 0)));
Götz Lindenmaier's avatar
Götz Lindenmaier committed
225
226
227
228
229
230
231
232
233
  } else if (get_irn_opcode(n) == iro_Phi) {
    /* Don't copy node if corresponding predecessor in block is Bad.
       The Block itself should not be Bad. */
    block = get_nodes_Block(n);
    set_irn_n (nn, -1, get_new_node(block));
    j = 0;
    for (i = 0; i < get_irn_arity(n); i++)
      if (get_irn_opcode(get_irn_n(block, i)) != iro_Bad) {
	set_irn_n (nn, j, get_new_node(get_irn_n(n, i)));
234
	//if (is_backedge(n, i)) set_backedge(nn, j);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
235
236
	j++;
      }
237
238
239
    /* If the pre walker reached this Phi after the post walker visited the
       block block_visited is > 0. */
    set_Block_block_visited(get_nodes_Block(n), 0);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
240
    /* Compacting the Phi's ins might generate Phis with only one
Götz Lindenmaier's avatar
Götz Lindenmaier committed
241
       predecessor. */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
242
    if (get_irn_arity(n) == 1)
243
      exchange(n, get_irn_n(n, 0));
Götz Lindenmaier's avatar
Götz Lindenmaier committed
244
245
246
247
  } else {
    for (i = -1; i < get_irn_arity(n); i++)
      set_irn_n (nn, i, get_new_node(get_irn_n(n, i)));
  }
248
249
250
251
  /* Now the new node is complete.  We can add it to the hash table for cse.
     @@@ inlinening aborts if we identify End. Why? */
  if(get_irn_op(nn) != op_End)
    add_identities (current_ir_graph->value_table, nn);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
252
253
}

254
/* Copies the graph recursively, compacts the keepalive of the end node. */
255
static void
256
copy_graph (void) {
257
  ir_node *oe, *ne; /* old end, new end */
258
  ir_node *ka;      /* keep alive */
259
260
261
262
  int i;

  oe = get_irg_end(current_ir_graph);
  /* copy the end node by hand, allocate dynamic in array! */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
263
264
  ne = new_ir_node(get_irn_dbg_info(oe),
		   current_ir_graph,
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
		   NULL,
		   op_End,
		   mode_X,
		   -1,
		   NULL);
  /* Copy the attributes.  Well, there might be some in the future... */
  copy_attrs(oe, ne);
  set_new_node(oe, ne);

  /* copy the live nodes */
  irg_walk(get_nodes_Block(oe), copy_node, copy_preds, NULL);
  /* copy_preds for the end node ... */
  set_nodes_Block(ne, get_new_node(get_nodes_Block(oe)));

  /** ... and now the keep alives. **/
  /* First pick the not marked block nodes and walk them.  We must pick these
     first as else we will oversee blocks reachable from Phis. */
  for (i = 0; i < get_irn_arity(oe); i++) {
    ka = get_irn_n(oe, i);
    if ((get_irn_op(ka) == op_Block) &&
	(get_irn_visited(ka) < get_irg_visited(current_ir_graph))) {
      /* We must keep the block alive and copy everything reachable */
      set_irg_visited(current_ir_graph, get_irg_visited(current_ir_graph)-1);
      irg_walk(ka, copy_node, copy_preds, NULL);
      add_End_keepalive(ne, get_new_node(ka));
    }
  }

  /* Now pick the Phis.  Here we will keep all! */
  for (i = 0; i < get_irn_arity(oe); i++) {
    ka = get_irn_n(oe, i);
    if ((get_irn_op(ka) == op_Phi)) {
      if (get_irn_visited(ka) < get_irg_visited(current_ir_graph)) {
	/* We didn't copy the Phi yet.  */
	set_irg_visited(current_ir_graph, get_irg_visited(current_ir_graph)-1);
	irg_walk(ka, copy_node, copy_preds, NULL);
      }
      add_End_keepalive(ne, get_new_node(ka));
    }
  }
}

Götz Lindenmaier's avatar
Götz Lindenmaier committed
307
/* Copies the graph reachable from current_ir_graph->end to the obstack
308
   in current_ir_graph and fixes the environment.
Götz Lindenmaier's avatar
Götz Lindenmaier committed
309
310
   Then fixes the fields in current_ir_graph containing nodes of the
   graph.  */
311
static void
312
copy_graph_env (void) {
313
  ir_node *old_end;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
314
  /* Not all nodes remembered in current_ir_graph might be reachable
315
     from the end node.  Assure their link is set to NULL, so that
Götz Lindenmaier's avatar
Götz Lindenmaier committed
316
317
318
319
320
321
322
323
324
     we can test whether new nodes have been computed. */
  set_irn_link(get_irg_frame  (current_ir_graph), NULL);
  set_irn_link(get_irg_globals(current_ir_graph), NULL);
  set_irn_link(get_irg_args   (current_ir_graph), NULL);

  /* we use the block walk flag for removing Bads from Blocks ins. */
  inc_irg_block_visited(current_ir_graph);

  /* copy the graph */
325
  copy_graph();
Götz Lindenmaier's avatar
Götz Lindenmaier committed
326
327

  /* fix the fields in current_ir_graph */
328
329
330
  old_end = get_irg_end(current_ir_graph);
  set_irg_end (current_ir_graph, get_new_node(old_end));
  free_End(old_end);
Götz Lindenmaier's avatar
Götz Lindenmaier committed
331
  set_irg_end_block  (current_ir_graph, get_new_node(get_irg_end_block(current_ir_graph)));
332
333
334
335
336
337
338
339
340
341
342
343
  if (get_irn_link(get_irg_frame(current_ir_graph)) == NULL) {
    copy_node (get_irg_frame(current_ir_graph), NULL);
    copy_preds(get_irg_frame(current_ir_graph), NULL);
  }
  if (get_irn_link(get_irg_globals(current_ir_graph)) == NULL) {
    copy_node (get_irg_globals(current_ir_graph), NULL);
    copy_preds(get_irg_globals(current_ir_graph), NULL);
  }
  if (get_irn_link(get_irg_args(current_ir_graph)) == NULL) {
    copy_node (get_irg_args(current_ir_graph), NULL);
    copy_preds(get_irg_args(current_ir_graph), NULL);
  }
Götz Lindenmaier's avatar
Götz Lindenmaier committed
344
  set_irg_start  (current_ir_graph, get_new_node(get_irg_start(current_ir_graph)));
345

Götz Lindenmaier's avatar
Götz Lindenmaier committed
346
347
348
349
350
351
352
353
354
355
  set_irg_start_block(current_ir_graph,
		      get_new_node(get_irg_start_block(current_ir_graph)));
  set_irg_frame  (current_ir_graph, get_new_node(get_irg_frame(current_ir_graph)));
  set_irg_globals(current_ir_graph, get_new_node(get_irg_globals(current_ir_graph)));
  set_irg_args   (current_ir_graph, get_new_node(get_irg_args(current_ir_graph)));
  if (get_irn_link(get_irg_bad(current_ir_graph)) == NULL) {
    copy_node(get_irg_bad(current_ir_graph), NULL);
    copy_preds(get_irg_bad(current_ir_graph), NULL);
  }
  set_irg_bad(current_ir_graph, get_new_node(get_irg_bad(current_ir_graph)));
356
357
358
359
360
  if (get_irn_link(get_irg_unknown(current_ir_graph)) == NULL) {
    copy_node(get_irg_unknown(current_ir_graph), NULL);
    copy_preds(get_irg_unknown(current_ir_graph), NULL);
  }
  set_irg_unknown(current_ir_graph, get_new_node(get_irg_unknown(current_ir_graph)));
Götz Lindenmaier's avatar
Götz Lindenmaier committed
361
362
}

363
364
365
366
367
368
/* Copies all reachable nodes to a new obstack.  Removes bad inputs
   from block nodes and the corresponding inputs from Phi nodes.
   Merges single exit blocks with single entry blocks and removes
   1-input Phis.
   Adds all new nodes to a new hash table for cse.  Does not
   perform cse, so the hash table might contain common subexpressions. */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
369
370
371
372
373
374
375
376
377
378
379
/* Amroq call this emigrate() */
/* Copies all reachable nodes of irg to a fresh obstack, thereby removing
   dead nodes and Bad inputs of Blocks/Phis (garbage collection by
   copying).  The old obstack is freed afterwards.  A new value table is
   installed for cse; note it may contain common subexpressions, cse
   itself is not performed here.  (Amroq calls this emigrate().) */
void
dead_node_elimination(ir_graph *irg) {
  ir_graph *saved;
  struct obstack *graveyard_obst = NULL;
  struct obstack *rebirth_obst   = NULL;

  /* Remember external state of current_ir_graph. */
  saved = current_ir_graph;
  current_ir_graph = irg;

  /* Graph state: must not be under construction; outs become stale. */
  assert(get_irg_phase_state(current_ir_graph) != phase_building);
  free_outs(current_ir_graph);

  /* @@@ so far we loose loops when copying */
  set_irg_loop(current_ir_graph, NULL);

  if (get_optimize() && get_opt_dead_node_elimination()) {

    /* A quiet place, where the old obstack can rest in peace,
       until it will be cremated. */
    graveyard_obst = irg->obst;

    /* A new obstack, where the reachable nodes will be copied to. */
    rebirth_obst = (struct obstack *) xmalloc (sizeof (struct obstack));
    current_ir_graph->obst = rebirth_obst;
    obstack_init (current_ir_graph->obst);

    /* The cse needs a fresh hash table as well. */
    del_identities (irg->value_table);
    irg->value_table = new_identities ();

    /* Copy the graph from the old to the new obstack. */
    copy_graph_env();

    /* Free memory from old unoptimized obstack. */
    obstack_free(graveyard_obst, 0);  /* First empty the obstack ... */
    xfree (graveyard_obst);           /* ... then free it.           */
  }

  current_ir_graph = saved;
}
412

413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
/* Relink bad predeseccors of a block and store the old in array to the
   link field. This function is called by relink_bad_predecessors().
   The array of link field starts with the block operand at position 0.
   If block has bad predecessors, create a new in array without bad preds.
   Otherwise let in array untouched. */
/* Relink bad predecessors of a block and store the old in array in the
   link field. This function is called by relink_bad_predecessors().
   The array in the link field starts with the block operand at position 0.
   If the block has Bad predecessors, a new in array without the Bads is
   built; otherwise the in array is left untouched. */
static void relink_bad_block_predecessors(ir_node *n, void *env) {
  ir_node **new_in, *irn;
  int i, new_irn_n, old_irn_arity, new_irn_arity = 0;

  /* if link field of block is NULL, look for bad predecessors otherwise
     this is already done */
  if (get_irn_op(n) == op_Block &&
      get_irn_link(n) == NULL) {

    /* save old predecessors in link field (position 0 is the block operand)*/
    set_irn_link(n, (void *)get_irn_in(n));

    /* count predecessors without bad nodes */
    old_irn_arity = get_irn_arity(n);
    for (i = 0; i < old_irn_arity; i++)
      if (!is_Bad(get_irn_n(n, i))) new_irn_arity++;

    /* arity changing: set new predecessors without bad nodes */
    if (new_irn_arity < old_irn_arity) {
      /* get new predecessor array without Block predecessor */
      new_in = NEW_ARR_D (ir_node *, current_ir_graph->obst, (new_irn_arity+1));

      /* set new predecessors in array.
	 BUGFIX: this loop formerly started at i = 1, which skipped
	 predecessor 0 (get_irn_n already excludes the block operand).
	 That dropped a good first predecessor and left the last slot of
	 new_in uninitialized, since the counting loop above counts all
	 predecessors starting from 0. */
      new_in[0] = NULL;
      new_irn_n = 1;
      for (i = 0; i < old_irn_arity; i++) {
	irn = get_irn_n(n, i);
	if (!is_Bad(irn)) new_in[new_irn_n++] = irn;
      }
      n->in = new_in;
    } /* ir node has bad predecessors */

  } /* Block is not relinked */
}

/* Relinks Bad predecesors from Bocks and Phis called by walker
   remove_bad_predecesors(). If n is a Block, call
   relink_bad_block_redecessors(). If n is a Phinode, call also the relinking
   function of Phi's Block. If this block has bad predecessors, relink preds
   of the Phinode. */
/* Walker callback used by remove_bad_predecessors(): relinks Bad
   predecessors of Blocks and Phis.
   For a Block, delegates to relink_bad_block_predecessors().
   For a Phi, first makes sure the Phi's block has been relinked (the
   block's OLD in array is left in its link field), then compacts the
   Phi's own in array in lock-step with the block: Phi input i is kept
   exactly when old block predecessor i was not Bad.
   NOTE(review): indices here address the raw in array, where position 0
   is the block operand; hence the loops start at 1. */
static void relink_bad_predecessors(ir_node *n, void *env) {
  ir_node *block, **old_in;
  int i, old_irn_arity, new_irn_arity;

  /* relink bad predecessors of a block */
  if (get_irn_op(n) == op_Block)
    relink_bad_block_predecessors(n, env);

  /* If Phi node relink its block and its predecessors */
  if (get_irn_op(n) == op_Phi) {

    /* Relink predecessors of the Phi's block */
    block = get_nodes_Block(n);
    if (get_irn_link(block) == NULL)
      relink_bad_block_predecessors(block, env);

    /* The block's original (uncompacted) in array, saved by
       relink_bad_block_predecessors in the link field. */
    old_in = (ir_node **)get_irn_link(block); /* Of Phi's Block */
    old_irn_arity = ARR_LEN(old_in);

    /* Relink Phi predecessors only if the block's arity changed,
       i.e. some Bad predecessors were actually removed. */
    if (old_irn_arity != ARR_LEN(get_irn_in(block))) {
      /* Compact the Phi's in array in place; n->in[0] (the block
	 operand) remains the same block. */
      new_irn_arity = 1;
      for(i = 1; i < old_irn_arity; i++)
	if (!is_Bad((ir_node *)old_in[i])) n->in[new_irn_arity++] = n->in[i];

      ARR_SETLEN(ir_node *, n->in, new_irn_arity);
    }

  } /* n is a Phi node */
}

/* Removes Bad Bad predecesors from Blocks and the corresponding
   inputs to Phi nodes as in dead_node_elimination but without
   copying the graph.
   On walking up set the link field to NULL, on walking down call
   relink_bad_predecessors() (This function stores the old in array
   to the link field and sets a new in array if arity of predecessors
   changes) */
498
/* Removes Bad predecessors from Blocks and the corresponding inputs of
   Phi nodes, as in dead_node_elimination but WITHOUT copying the graph:
   init_link clears the link fields, relink_bad_predecessors then saves
   each block's old in array in its link field and installs a compacted
   in array where the arity changed. */
void remove_bad_predecessors(ir_graph *irg) {
  irg_walk_graph(irg, init_link, relink_bad_predecessors, NULL);
}

502

503
504
505
506
/**********************************************************************/
/*  Funcionality for inlining                                         */
/**********************************************************************/

Götz Lindenmaier's avatar
Götz Lindenmaier committed
507
508
509
510
/* Copy node for inlineing.  Copies the node by calling copy_node and
   then updates the entity if it's a local one.  env must be a pointer
   to the frame type of the procedure. The new entities must be in
   the link field of the entities. */
511
static INLINE void
Götz Lindenmaier's avatar
Götz Lindenmaier committed
512
513
514
515
516
517
518
519
520
521
522
523
524
525
copy_node_inline (ir_node *n, void *env) {
  ir_node *new;
  type *frame_tp = (type *)env;

  copy_node(n, NULL);
  if (get_irn_op(n) == op_Sel) {
    new = get_new_node (n);
    assert(get_irn_op(new) == op_Sel);
    if (get_entity_owner(get_Sel_entity(n)) == frame_tp) {
      set_Sel_entity(new, get_entity_link(get_Sel_entity(n)));
    }
  }
}

526
527
528
529
530
531
532
533
void inline_method(ir_node *call, ir_graph *called_graph) {
  ir_node *pre_call;
  ir_node *post_call, *post_bl;
  ir_node *in[5];
  ir_node *end, *end_bl;
  ir_node **res_pred;
  ir_node **cf_pred;
  ir_node *ret, *phi;
534
  ir_node *cf_op = NULL, *bl;
535
  int arity, n_ret, n_exc, n_res, i, j, rem_opt;
536
  type *called_frame;
537

538
  if (!get_optimize() || !get_opt_inline()) return;
Michael Beck's avatar
Michael Beck committed
539
  /* --  Turn off optimizations, this can cause problems when allocating new nodes. -- */
540
541
  rem_opt = get_optimize();
  set_optimize(0);
542

543
  /* Handle graph state */
544
  assert(get_irg_phase_state(current_ir_graph) != phase_building);
545
546
  assert(get_irg_pinned(current_ir_graph) == pinned);
  assert(get_irg_pinned(called_graph) == pinned);
547
548
549
  if (get_irg_outs_state(current_ir_graph) == outs_consistent)
    set_irg_outs_inconsistent(current_ir_graph);

Michael Beck's avatar
Michael Beck committed
550
  /* -- Check preconditions -- */
551
  assert(get_irn_op(call) == op_Call);
Michael Beck's avatar
Michael Beck committed
552
  /* @@@ does not work for InterfaceIII.java after cgana
553
554
555
     assert(get_Call_type(call) == get_entity_type(get_irg_ent(called_graph)));
     assert(smaller_type(get_entity_type(get_irg_ent(called_graph)),
     get_Call_type(call)));
556
  */
557
558
559
  assert(get_type_tpop(get_Call_type(call)) == type_method);
  if (called_graph == current_ir_graph) return;

560

Michael Beck's avatar
Michael Beck committed
561
/* --
562
563
      the procedure and later replaces the Start node of the called graph.
      Post_call is the old Call node and collects the results of the called
Michael Beck's avatar
Michael Beck committed
564
      graph. Both will end up being a tuple.  -- */
565
566
  post_bl = get_nodes_Block(call);
  set_irg_current_block(current_ir_graph, post_bl);
567
  /* XxMxPxP of Start + parameter of Call */
568
569
570
571
  in[0] = new_Jmp();
  in[1] = get_Call_mem(call);
  in[2] = get_irg_frame(current_ir_graph);
  in[3] = get_irg_globals(current_ir_graph);
572
  in[4] = new_Tuple (get_Call_n_params(call), get_Call_param_arr(call));
573
574
575
  pre_call = new_Tuple(5, in);
  post_call = call;

Michael Beck's avatar
Michael Beck committed
576
/* --
577
      The new block gets the ins of the old block, pre_call and all its
Michael Beck's avatar
Michael Beck committed
578
      predecessors and all Phi nodes. -- */
579
580
  part_block(pre_call);

Michael Beck's avatar
Michael Beck committed
581
  /* -- Prepare state for dead node elimination -- */
582
583
584
  /* Visited flags in calling irg must be >= flag in called irg.
     Else walker and arity computation will not work. */
  if (get_irg_visited(current_ir_graph) <= get_irg_visited(called_graph))
585
    set_irg_visited(current_ir_graph, get_irg_visited(called_graph)+1);
586
587
588
  if (get_irg_block_visited(current_ir_graph)< get_irg_block_visited(called_graph))
    set_irg_block_visited(current_ir_graph, get_irg_block_visited(called_graph));
  /* Set pre_call as new Start node in link field of the start node of
589
590
591
592
     calling graph and pre_calls block as new block for the start block
     of calling graph.
     Further mark these nodes so that they are not visited by the
     copying. */
593
594
  set_irn_link(get_irg_start(called_graph), pre_call);
  set_irn_visited(get_irg_start(called_graph),
595
		  get_irg_visited(current_ir_graph));
596
  set_irn_link(get_irg_start_block(called_graph),
597
	       get_nodes_Block(pre_call));
598
  set_irn_visited(get_irg_start_block(called_graph),
599
		  get_irg_visited(current_ir_graph));
600
601
602
603

  /* Initialize for compaction of in arrays */
  inc_irg_block_visited(current_ir_graph);

Michael Beck's avatar
Michael Beck committed
604
  /* -- Replicate local entities of the called_graph -- */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
605
606
  /* copy the entities. */
  called_frame = get_irg_frame_type(called_graph);
607
  for (i = 0; i < get_class_n_members(called_frame); i++) {
Götz Lindenmaier's avatar
Götz Lindenmaier committed
608
609
610
611
612
    entity *new_ent, *old_ent;
    old_ent = get_class_member(called_frame, i);
    new_ent = copy_entity_own(old_ent, get_cur_frame_type());
    set_entity_link(old_ent, new_ent);
  }
613
614

  /* visited is > than that of called graph.  With this trick visited will
615
616
     remain unchanged so that an outer walker, e.g., searching the call nodes
     to inline, calling this inline will not visit the inlined nodes. */
617
618
  set_irg_visited(current_ir_graph, get_irg_visited(current_ir_graph)-1);

Michael Beck's avatar
Michael Beck committed
619
  /* -- Performing dead node elimination inlines the graph -- */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
620
621
  /* Copies the nodes to the obstack of current_ir_graph. Updates links to new
     entities. */
622
  /* @@@ endless loops are not copied!! -- they should be, I think... */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
623
  irg_walk(get_irg_end(called_graph), copy_node_inline, copy_preds,
624
	   get_irg_frame_type(called_graph));
625
626
627
628
629
630

  /* Repair called_graph */
  set_irg_visited(called_graph, get_irg_visited(current_ir_graph));
  set_irg_block_visited(called_graph, get_irg_block_visited(current_ir_graph));
  set_Block_block_visited(get_irg_start_block(called_graph), 0);

Michael Beck's avatar
Michael Beck committed
631
  /* -- Merge the end of the inlined procedure with the call site -- */
Götz Lindenmaier's avatar
Götz Lindenmaier committed
632
633
634
635
636
  /* We will turn the old Call node into a Tuple with the following
     predecessors:
     -1:  Block of Tuple.
     0: Phi of all Memories of Return statements.
     1: Jmp from new Block that merges the control flow from all exception
637
	 predecessors of the old end block.
Götz Lindenmaier's avatar
Götz Lindenmaier committed
638
639
640
     2: Tuple of all arguments.
     3: Phi of Exception memories.
  */
641

Michael Beck's avatar
Michael Beck committed
642
  /* -- Precompute some values -- */
643
644
645
  end_bl = get_new_node(get_irg_end_block(called_graph));
  end = get_new_node(get_irg_end(called_graph));
  arity = get_irn_arity(end_bl);    /* arity = n_exc + n_ret  */
646
  n_res = get_method_n_ress(get_Call_type(call));
647

648
  res_pred = (ir_node **) malloc (n_res * sizeof (ir_node *));
649
650
651
652
  cf_pred = (ir_node **) malloc (arity * sizeof (ir_node *));

  set_irg_current_block(current_ir_graph, post_bl); /* just to make sure */

Michael Beck's avatar
Michael Beck committed
653
  /* -- archive keepalives -- */
654
655
  for (i = 0; i < get_irn_arity(end); i++)
    add_End_keepalive(get_irg_end(current_ir_graph), get_irn_n(end, i));
656
657
  /* The new end node will die, but the in array is not on the obstack ... */
  free_End(end);
658

Michael Beck's avatar
Michael Beck committed
659
660
/* --
      Return nodes by Jump nodes. -- */
661
662
663
664
665
666
667
668
669
670
671
  n_ret = 0;
  for (i = 0; i < arity; i++) {
    ir_node *ret;
    ret = get_irn_n(end_bl, i);
    if (get_irn_op(ret) == op_Return) {
      cf_pred[n_ret] = new_r_Jmp(current_ir_graph, get_nodes_Block(ret));
      n_ret++;
    }
  }
  set_irn_in(post_bl, n_ret, cf_pred);

Michael Beck's avatar
Michael Beck committed
672
673
/* --
      turned into a tuple.  -- */
674
675
676
677
678
679
680
681
682
683
684
685
  turn_into_tuple(post_call, 4);
  /* First the Memory-Phi */
  n_ret = 0;
  for (i = 0; i < arity; i++) {
    ret = get_irn_n(end_bl, i);
    if (get_irn_op(ret) == op_Return) {
      cf_pred[n_ret] = get_Return_mem(ret);
      n_ret++;
    }
  }
  phi = new_Phi(n_ret, cf_pred, mode_M);
  set_Tuple_pred(call, 0, phi);
686
687
688
689
690
  /* Conserve Phi-list for further inlinings -- but might be optimized */
  if (get_nodes_Block(phi) == post_bl) {
    set_irn_link(phi, get_irn_link(post_bl));
    set_irn_link(post_bl, phi);
  }
691
692
693
694
695
  /* Now the real results */
  if (n_res > 0) {
    for (j = 0; j < n_res; j++) {
      n_ret = 0;
      for (i = 0; i < arity; i++) {
696
697
698
699
700
	ret = get_irn_n(end_bl, i);
	if (get_irn_op(ret) == op_Return) {
	  cf_pred[n_ret] = get_Return_res(ret, j);
	  n_ret++;
	}
701
702
703
      }
      phi = new_Phi(n_ret, cf_pred, get_irn_mode(cf_pred[0]));
      res_pred[j] = phi;
704
705
706
707
708
      /* Conserve Phi-list for further inlinings -- but might be optimized */
      if (get_nodes_Block(phi) == post_bl) {
	set_irn_link(phi, get_irn_link(post_bl));
	set_irn_link(post_bl, phi);
      }
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
    }
    set_Tuple_pred(call, 2, new_Tuple(n_res, res_pred));
  } else {
    set_Tuple_pred(call, 2, new_Bad());
  }
  /* Finally the exception control flow.  We need to add a Phi node to
     collect the memory containing the exception objects.  Further we need
     to add another block to get a correct representation of this Phi.  To
     this block we add a Jmp that resolves into the X output of the Call
     when the Call is turned into a tuple. */
  n_exc = 0;
  for (i = 0; i < arity; i++) {
    ir_node *ret;
    ret = get_irn_n(end_bl, i);
    if (is_fragile_op(skip_Proj(ret)) || (get_irn_op(skip_Proj(ret)) == op_Raise)) {
      cf_pred[n_exc] = ret;
      n_exc++;
    }
  }
  if (n_exc > 0) {
729
    new_Block(n_exc, cf_pred);      /* watch it: current_block is changed! */
730
731
732
733
734
735
    set_Tuple_pred(call, 1, new_Jmp());
    /* The Phi for the memories with the exception objects */
    n_exc = 0;
    for (i = 0; i < arity; i++) {
      ir_node *ret;
      ret = skip_Proj(get_irn_n(end_bl, i));
Götz Lindenmaier's avatar
Götz Lindenmaier committed
736
      if (get_irn_op(ret) == op_Call) {
737
738
	cf_pred[n_exc] = new_r_Proj(current_ir_graph, get_nodes_Block(ret), ret, mode_M, 3);
	n_exc++;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
739
      } else if (is_fragile_op(ret)) {
740
741
742
	/* We rely that all cfops have the memory output at the same position. */
	cf_pred[n_exc] = new_r_Proj(current_ir_graph, get_nodes_Block(ret), ret, mode_M, 0);
	n_exc++;
Götz Lindenmaier's avatar
Götz Lindenmaier committed
743
      } else if (get_irn_op(ret) == op_Raise) {
744
745
	cf_pred[n_exc] = new_r_Proj(current_ir_graph, get_nodes_Block(ret), ret, mode_M, 1);
	n_exc++;
746
747
748
749
750
751
752
753
754
755
      }
    }
    set_Tuple_pred(call, 3, new_Phi(n_exc, cf_pred, mode_M));
  } else {
    set_Tuple_pred(call, 1, new_Bad());
    set_Tuple_pred(call, 3, new_Bad());
  }
  free(res_pred);
  free(cf_pred);

Michael Beck's avatar
Michael Beck committed
756
/* --
757
758
759
760
       If the exception control flow from the Call directly branched to the
       end block we now have the following control flow predecessor pattern:
       ProjX -> Tuple -> Jmp.
       We must remove the Jmp along with it's empty block and add Jmp's
Michael Beck's avatar
Michael Beck committed
761
       predecessors as predecessors of this end block. -- */
762
763
764
765
766
767
768
  /* find the problematic predecessor of the end block. */
  end_bl = get_irg_end_block(current_ir_graph);
  for (i = 0; i < get_Block_n_cfgpreds(end_bl); i++) {
    cf_op = get_Block_cfgpred(end_bl, i);
    if (get_irn_op(cf_op) == op_Proj) {
      cf_op = get_Proj_pred(cf_op);
      if (get_irn_op(cf_op) == op_Tuple) {
769
770
771
	cf_op = get_Tuple_pred(cf_op, 1);
	assert(get_irn_op(cf_op) == op_Jmp);
	break;
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
      }
    }
  }
  /* repair */
  if (i < get_Block_n_cfgpreds(end_bl)) {
    bl = get_nodes_Block(cf_op);
    arity = get_Block_n_cfgpreds(end_bl) + get_Block_n_cfgpreds(bl) - 1;
    cf_pred = (ir_node **) malloc (arity * sizeof (ir_node *));
    for (j = 0; j < i; j++)
      cf_pred[j] = get_Block_cfgpred(end_bl, j);
    for (j = j; j < i + get_Block_n_cfgpreds(bl); j++)
      cf_pred[j] = get_Block_cfgpred(bl, j-i);
    for (j = j; j < arity; j++)
      cf_pred[j] = get_Block_cfgpred(end_bl, j-get_Block_n_cfgpreds(bl) +1);
    set_irn_in(end_bl, arity, cf_pred);
    free(cf_pred);
  }
789

Michael Beck's avatar
Michael Beck committed
790
  /* --  Turn cse back on. -- */
791
  set_optimize(rem_opt);
792
}
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815

/********************************************************************/
/* Apply inlineing to small methods.                                */
/********************************************************************/

/* Number of Call nodes collected so far by collect_calls().  Reset to 0
   by inline_small_irgs() before each graph walk; never exceeds
   MAX_INLINE (collect_calls stops recording when the cap is reached). */
static int pos;

/* It makes no sense to inline too many calls in one procedure. Anyways,
   I didn't get a version with NEW_ARR_F to run. */
#define MAX_INLINE 1024

/* Walker callback: remember every Call node whose address is a Const
   that references an entity with a locally available ir_graph.  The
   calls found are stored into the array handed in through env; the
   file-static counter pos counts the entries and is capped at
   MAX_INLINE. */
static void collect_calls(ir_node *node, void *env) {
  ir_node **call_arr = (ir_node **)env;
  ir_node  *ptr;
  tarval   *tv;
  ir_graph *callee_irg;

  if (get_irn_op(node) != op_Call) return;

  ptr = get_Call_ptr(node);
  if (get_irn_op(ptr) != op_Const) return;

  /* The called address is a constant: does it name a compiled entity? */
  tv = get_Const_tarval(ptr);
  if (!tv->u.P.ent) return;

  callee_irg = get_entity_irg(tv->u.P.ent);
  if (callee_irg && pos < MAX_INLINE) {
    /* The Call node calls a locally defined method.  Remember to inline. */
    call_arr[pos++] = node;
  }
}

/* Inlines all small methods at call sites where the called address comes
   from a Const node that references the entity representing the called
   method.
   The size argument is a rough measure for the code size of the method:
   Methods where the obstack containing the firm graph is smaller than
   size are inlined. */
/* Inlines all small methods at call sites where the called address comes
   from a Const node that references the entity representing the called
   method.
   The size argument is a rough measure for the code size of the method:
   Methods where the obstack containing the firm graph is smaller than
   size are inlined. */
void inline_small_irgs(ir_graph *irg, int size) {
  int i;
  ir_node *calls[MAX_INLINE];
  ir_graph *rem = current_ir_graph;

  if (!(get_optimize() && get_opt_inline())) return;

  current_ir_graph = irg;
  /* Handle graph state */
  assert(get_irg_phase_state(current_ir_graph) != phase_building);

  /* Find Call nodes to inline.
     (We can not inline during a walk of the graph, as inlineing the same
     method several times changes the visited flag of the walked graph:
     after the first inlineing visited of the callee equals visited of
     the caller.  With the next inlineing both are increased.) */
  pos = 0;
  irg_walk(get_irg_end(irg), NULL, collect_calls, (void *) calls);

  /* pos can legally equal MAX_INLINE: collect_calls stops recording at
     the cap, but calls[0 .. pos-1] are all valid and should be inlined.
     The former test (pos < MAX_INLINE) dropped *all* inlining in the
     full-array case, which was an off-by-one. */
  if (pos > 0) {
    /* There are calls to inline */
    collect_phiprojs(irg);
    for (i = 0; i < pos; i++) {
      tarval *tv;
      ir_graph *callee;
      /* collect_calls guarantees a Const address with a non-NULL entity
         whose graph is locally available. */
      tv = get_Const_tarval(get_Call_ptr(calls[i]));
      callee = get_entity_irg(tv->u.P.ent);
      /* Inline only if the callee's graph obstack is smaller than size. */
      if ((_obstack_memory_used(callee->obst) - obstack_room(callee->obst)) < size) {
	inline_method(calls[i], callee);
      }
    }
  }

  current_ir_graph = rem;
}
Götz Lindenmaier's avatar
Götz Lindenmaier committed
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943


/********************************************************************/
/*  Code Placement.  Pinns all floating nodes to a block where they */
/*  will be executed only if needed.                                */
/********************************************************************/

static pdeq *worklist;		/* worklist of ir_node*s */

/* Find the earliest correct block for N.  --- Place N into the
   same Block as its dominance-deepest Input.  */
/* Find the earliest correct block for n: pin n into the same block as
   its dominance-deepest predecessor.  Recurses over all floating
   predecessors first; pinned predecessors are pushed onto the global
   worklist for later processing. */
static void
place_floats_early (ir_node *n)
{
  int i, first;

  /* Each node is handled exactly once, otherwise we would recurse
     forever through cycles of floating nodes. */
  assert (irn_not_visited(n));
  mark_irn_visited(n);

  if (get_op_pinned(get_irn_op(n)) == floats) {
    /* n floats: choose a block for it. */
    int      best_depth = 0;
    ir_node *best_block = new_Bad();   /* block n will be placed in */

    assert(get_irn_op(n) != op_Block);

    if ((get_irn_op(n) == op_Const)    ||
	(get_irn_op(n) == op_SymConst) ||
	(is_Bad(n))) {
      /* These nodes have no placeable predecessors; the loop below
         would leave best_block Bad, so default to the start block. */
      best_block = get_irg_start_block(current_ir_graph);
      best_depth = 1;
    }

    for (i = 0; i < get_irn_arity(n); i++) {
      ir_node *pred = get_irn_n(n, i);
      ir_node *pred_block;

      if ((irn_not_visited(pred)) &&
	  (get_op_pinned(get_irn_op(pred)) == floats)) {
	place_floats_early (pred);
      }
      /* Because all loops contain at least one pinned node, now all
         our inputs are either pinned or place_early has already
         been finished on them.  We do not have any unfinished inputs!  */
      pred_block = get_nodes_Block(pred);
      if ((!is_Bad(pred_block)) &&
	  (get_Block_dom_depth(pred_block) > best_depth)) {
	best_block = pred_block;
	best_depth = get_Block_dom_depth(pred_block);
      }
      /* Never leave the node in the start block itself. */
      if ((best_depth == 1) && (get_Block_dom_depth(get_nodes_Block(n)) > 1)) {
	best_block = get_Block_cfg_out(get_irg_start_block(current_ir_graph), 0);
	assert(best_block != get_irg_start_block(current_ir_graph));
	best_depth = 2;
      }
    }
    set_nodes_Block(n, best_block);
  }

  /* Push all unvisited predecessors (including the block input for
     non-Block nodes, index -1) onto the worklist. */
  first = (get_irn_op(n) == op_Block) ? 0 : -1;
  for (i = first; i < get_irn_arity(n); i++) {
    ir_node *pred = get_irn_n(n, i);
    if (irn_not_visited(pred))
      pdeq_putr (worklist, pred);
  }
}

/* Floating nodes form subgraphs that begin at nodes as Const, Load,
   Start, Call and end at pinned nodes as Store, Call.  Place_early
   places all floating nodes reachable from its argument through floating
   nodes and adds all beginnings at pinned nodes to the worklist. */
944
/* Floating nodes form subgraphs that begin at nodes such as Const, Load
   or Start and end at pinned nodes such as Store or Call.  This pass
   places every floating node reachable from End into its earliest legal
   block; the worklist carries the exploration across pinned nodes. */
static INLINE void place_early (void) {
  assert(worklist);
  inc_irg_visited(current_ir_graph);

  /* Seed the worklist by placing everything reachable from End. */
  place_floats_early (get_irg_end(current_ir_graph));

  /* Drain the worklist. */
  while (!pdeq_empty (worklist)) {
    ir_node *node = pdeq_getl (worklist);
    if (irn_not_visited(node))
      place_floats_early (node);
  }

  /* Node blocks changed, so the out edges are stale now. */
  set_irg_outs_inconsistent(current_ir_graph);
  current_ir_graph->pinned = pinned;
}


/* deepest common dominance ancestor of DCA and CONSUMER of PRODUCER */
/* Deepest common dominance ancestor of dca and the block(s) in which
   consumer actually uses producer.  A NULL dca means "unconstrained so
   far" and simply yields the use block. */
static ir_node *
consumer_dom_dca (ir_node *dca, ir_node *consumer, ir_node *producer)
{
  ir_node *use_bl = NULL;

  /* Compute the latest block into which we can place a node so that it
     is available before consumer. */
  if (get_irn_op(consumer) == op_Phi) {
    /* For a Phi the effective use is in the predecessor block(s) whose
       Phi inputs reference producer, not in the Phi's own block. */
    int i;
    ir_node *phi_bl = get_nodes_Block(consumer);
    for (i = 0; i < get_irn_arity(consumer); i++) {
      if (get_irn_n(consumer, i) == producer)
	use_bl = get_nodes_Block(get_Block_cfgpred(phi_bl, i));
    }
  } else {
    assert(is_no_Block(consumer));
    use_bl = get_nodes_Block(consumer);
  }

  /* Walk both blocks up the dominator tree until they meet: first
     equalize the dominance depths, then step up in lockstep. */
  assert(use_bl);
  if (!dca) return use_bl;
  while (get_Block_dom_depth(use_bl) > get_Block_dom_depth(dca))
    use_bl = get_Block_idom(use_bl);
  while (get_Block_dom_depth(dca) > get_Block_dom_depth(use_bl))
    dca = get_Block_idom(dca);
  while (use_bl != dca) {
    use_bl = get_Block_idom(use_bl);
    dca    = get_Block_idom(dca);
  }

  return dca;
}

998
/* Nesting depth of the loop the node n belongs to. */
static INLINE int get_irn_loop_depth(ir_node *n) {
  ir_loop *loop = get_irn_loop(n);
  return get_loop_depth(loop);
}
For faster browsing, not all history is shown. View entire blame