/*
 * Copyright (C) 1995-2008 University of Karlsruhe.  All rights reserved.
 *
 * This file is part of libFirm.
 *
 * This file may be distributed and/or modified under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation and appearing in the file LICENSE.GPL included in the
 * packaging of this file.
 *
 * Licensees holding valid libFirm Professional Edition licenses may use
 * this file in accordance with the libFirm Commercial License
 * Agreement provided with the Software.
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE.
 */

/**
 * @file
 * @brief       New approach to allocation and copy coalescing
 * @author      Matthias Braun
 * @date        14.2.2009
 * @version     $Id$
 *
 * ... WE NEED A NAME FOR THIS ...
 *
 * Only a proof of concept at this moment...
 *
 * The idea is to allocate registers in 2 passes:
 * 1. A first pass to determine "preferred" registers for live-ranges. This
 *    calculates, for each live-range and each register, a value indicating
 *    how useful that register is. (You can roughly think of the value as
 *    the negated copy costs incurred when the value resides in that
 *    specific register...)
 *
 * 2. Walk blocks and assign registers in a greedy fashion, preferring
 *    registers with high preference values. When register constraints are
 *    not met, add copies and split live-ranges.
 *
 * TODO:
 *  - output constraints are not ensured. The algorithm does not yet copy
 *    values away, so the registers required by constrained outputs are not
 *    necessarily free.
 *  - the must_be_different constraint is not respected
 *  - no parallel copies at basic block borders are created, and no
 *    additional phis are created after copies have been inserted
 *  - phi color assignment should give bonus points towards registers
 *    already assigned at the predecessors
 *  - think about a smarter sequence of visiting the blocks: sorting by
 *    execfreq might be good, or walking the looptree from the innermost to
 *    the outermost loops, going over the blocks in reverse postorder
 */
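
/* Illustrative sketch (not compiled into the allocator, all toy_* names are
 * made up for this example): the preference bookkeeping of pass 1 in
 * miniature.  A definition that is limited to a single register lowers the
 * preference of every other register for that value, and lowers the allowed
 * register for all other live values by a smaller neighbor factor -- this
 * mirrors what give_penalties_for_limits() below does on real nodes. */
#if 0
#include <stdio.h>

#define TOY_N_REGS          4
#define TOY_NEIGHBOR_FACTOR 0.2f

static void toy_give_penalties(float prefs[][TOY_N_REGS], int value,
                               const int *live_values, int n_live,
                               const int *allowed, float weight)
{
	int i, r;

	/* forbidden registers become less attractive for the value itself */
	for (r = 0; r < TOY_N_REGS; ++r) {
		if (!allowed[r])
			prefs[value][r] -= weight;
	}
	/* the allowed registers become less attractive for the live neighbors */
	for (i = 0; i < n_live; ++i) {
		if (live_values[i] == value)
			continue;
		for (r = 0; r < TOY_N_REGS; ++r) {
			if (allowed[r])
				prefs[live_values[i]][r] -= weight * TOY_NEIGHBOR_FACTOR;
		}
	}
}

int main(void)
{
	float prefs[2][TOY_N_REGS] = { { 0.0f } };
	int   live[]    = { 0, 1 };
	int   allowed[] = { 1, 0, 0, 0 };  /* value 0 is constrained to r0 */
	int   v, r;

	toy_give_penalties(prefs, 0, live, 2, allowed, 1.0f /* block execfreq */);
	for (v = 0; v < 2; ++v) {
		for (r = 0; r < TOY_N_REGS; ++r)
			printf("%6.2f", prefs[v][r]);
		printf("\n");
	}
	/* value 0 now clearly prefers r0, value 1 slightly avoids r0 */
	return 0;
}
#endif
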
#include "config.h"

#include <float.h>

#include "obst.h"
#include "irnode_t.h"
#include "irgraph_t.h"
#include "iredges_t.h"
#include "ircons.h"
#include "irgwalk.h"
#include "execfreq.h"

#include "be.h"
#include "bera.h"
#include "belive_t.h"
#include "bemodule.h"
#include "bechordal_t.h"
#include "besched_t.h"
#include "beirg_t.h"
#include "benode_t.h"
#include "bespill.h"
#include "bespilloptions.h"
#include "beverify.h"

#include "bipartite.h"
#include "hungarian.h"

#define USE_FACTOR       1.0f
#define DEF_FACTOR       1.0f
#define NEIGHBOR_FACTOR  0.2f
#define SHOULD_BE_SAME   1.0f

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

static struct obstack               obst;
static be_irg_t                    *birg;
static ir_graph                    *irg;
static const arch_register_class_t *cls;
static be_lv_t                     *lv;
static const ir_exec_freq          *execfreqs;
static unsigned                     n_regs;
static bitset_t                    *ignore_regs;

/** info about the current assignment for a register */
struct assignment_t {
	ir_node *value;            /**< currently assigned value */
};
typedef struct assignment_t assignment_t;

/** currently active assignments (while processing a basic block) */
static assignment_t *assignments;

/**
 * allocation information: last_uses, register preferences
 * the information is stored per firm-node.
 */
struct allocation_info_t {
	unsigned      last_uses;   /**< bitset indicating last uses (input pos) */
	assignment_t *current_assignment;
	float         prefs[0];    /**< register preferences */
};
typedef struct allocation_info_t allocation_info_t;

/** helper data structure used when sorting register preferences */
struct reg_pref_t {
	unsigned num;
	float    pref;
};
typedef struct reg_pref_t reg_pref_t;

/** per basic-block information */
struct block_info_t {
	int          processed;       /**< indicates whether the block was processed */
	assignment_t assignments[0];  /**< register assignments at end of block */
};
typedef struct block_info_t block_info_t;

/**
 * Get the allocation info for a node.
 * The info is allocated on the first visit of a node.
 */
static allocation_info_t *get_allocation_info(ir_node *node)
{
	allocation_info_t *info;
	if (!irn_visited(node)) {
		size_t size = sizeof(info[0]) + n_regs * sizeof(info->prefs[0]);
		info = obstack_alloc(&obst, size);
		memset(info, 0, size);
		set_irn_link(node, info);
		mark_irn_visited(node);
	} else {
		info = get_irn_link(node);
	}

	return info;
}

/**
 * Get allocation information for a basic block
 */
static block_info_t *get_block_info(ir_node *block)
{
	block_info_t *info;

	assert(is_Block(block));
	if (!irn_visited(block)) {
		size_t size = sizeof(info[0]) + n_regs * sizeof(info->assignments[0]);
		info = obstack_alloc(&obst, size);
		memset(info, 0, size);
		set_irn_link(block, info);
		mark_irn_visited(block);
	} else {
		info = get_irn_link(block);
	}

	return info;
}

/**
 * Link the allocation info of a node to a copy.
 * Afterwards, both nodes use the same allocation info.
 * The copy must not have an allocation info assigned yet.
 *
 * @param copy   the node that gets the allocation info assigned
 * @param value  the original node
 */
static void link_to(ir_node *copy, ir_node *value)
{
	allocation_info_t *info = get_allocation_info(value);
	assert(!irn_visited(copy));
	set_irn_link(copy, info);
	mark_irn_visited(copy);
}

/**
 * Calculate the penalties for every register on a node and its live neighbors.
 *
 * @param live_nodes   the set of live nodes at the current position, may be NULL
 * @param penalty      the penalty to subtract from the register preferences
 * @param limited      a raw bitset containing the limited set for the node
 * @param node         the node
 */
static void give_penalties_for_limits(const ir_nodeset_t *live_nodes,
                                      float penalty, const unsigned *limited,
                                      ir_node *node)
{
	ir_nodeset_iterator_t iter;
	unsigned              r;
	allocation_info_t     *info = get_allocation_info(node);
	ir_node               *neighbor;

	/* give penalty for all forbidden regs */
	for (r = 0; r < n_regs; ++r) {
		if (rbitset_is_set(limited, r))
			continue;

		info->prefs[r] -= penalty;
	}

	/* all other live values should get a penalty for allowed regs */
	if (live_nodes == NULL)
		return;

	/* TODO: reduce penalty if there are multiple allowed registers... */
	penalty *= NEIGHBOR_FACTOR;
	foreach_ir_nodeset(live_nodes, neighbor, iter) {
		allocation_info_t *neighbor_info;

		/* TODO: if op is used on multiple inputs we might not want to do a
		 * continue here */
		if (neighbor == node)
			continue;

		neighbor_info = get_allocation_info(neighbor);
		for (r = 0; r < n_regs; ++r) {
			if (!rbitset_is_set(limited, r))
				continue;

			neighbor_info->prefs[r] -= penalty;
		}
	}
}

/**
 * Calculate the preferences of a definition for the current register class.
 * If the definition uses a limited set of registers, penalize all other
 * registers on the node itself and the limited registers on its live
 * neighbors.
 *
 * @param live_nodes  the set of live nodes at the current node
 * @param weight      the weight (block execution frequency)
 * @param node        the current node
 */
static void check_defs(const ir_nodeset_t *live_nodes, float weight,
                       ir_node *node)
{
	const arch_register_req_t *req;

	if (get_irn_mode(node) == mode_T) {
		const ir_edge_t *edge;
		foreach_out_edge(node, edge) {
			ir_node *proj = get_edge_src_irn(edge);
			check_defs(live_nodes, weight, proj);
		}
		return;
	}

	if (!arch_irn_consider_in_reg_alloc(cls, node))
		return;

	req = arch_get_register_req_out(node);
	if (req->type & arch_register_req_type_limited) {
		const unsigned *limited = req->limited;
		float           penalty = weight * DEF_FACTOR;
		give_penalties_for_limits(live_nodes, penalty, limited, node);
	}

	if (req->type & arch_register_req_type_should_be_same) {
		ir_node           *insn  = skip_Proj(node);
		allocation_info_t *info  = get_allocation_info(node);
		int                arity = get_irn_arity(insn);
		int                i;

		float factor = 1.0f / rbitset_popcnt(&req->other_same, arity);
		for (i = 0; i < arity; ++i) {
			ir_node           *op;
			unsigned          r;
			allocation_info_t *op_info;

			if (!rbitset_is_set(&req->other_same, i))
				continue;

			op      = get_irn_n(insn, i);
			op_info = get_allocation_info(op);
			for (r = 0; r < n_regs; ++r) {
				if (bitset_is_set(ignore_regs, r))
					continue;
				op_info->prefs[r] += info->prefs[r] * factor;
			}
		}
	}
}

/**
 * Walker: Runs on a block and calculates the preferences for every
 * node and every register from the considered register class.
 */
static void analyze_block(ir_node *block, void *data)
{
	float         weight = get_block_execfreq(execfreqs, block);
	ir_nodeset_t  live_nodes;
	ir_node      *node;
	(void) data;

	ir_nodeset_init(&live_nodes);
	be_liveness_end_of_block(lv, cls, block, &live_nodes);

	sched_foreach_reverse(block, node) {
		allocation_info_t *info;
		int               i, arity;

		if (is_Phi(node)) {
			/* TODO: handle constrained phi-nodes */
			break;
		}

		/* TODO give/take penalties for should_be_same/different) */
		check_defs(&live_nodes, weight, node);

		/* mark last uses */
		arity = get_irn_arity(node);
		/* I was lazy, and only allocated 1 unsigned
		   => maximum of 32 uses per node (rewrite if necessary) */
		assert(arity <= (int) sizeof(unsigned) * 8);

		info = get_allocation_info(node);
		for (i = 0; i < arity; ++i) {
			ir_node *op = get_irn_n(node, i);
			if (!arch_irn_consider_in_reg_alloc(cls, op))
				continue;

			/* last usage of a value? */
			if (!ir_nodeset_contains(&live_nodes, op)) {
				rbitset_set(&info->last_uses, i);
			}
		}

		be_liveness_transfer(cls, node, &live_nodes);

		/* update weights based on usage constraints */
		for (i = 0; i < arity; ++i) {
			const arch_register_req_t *req;
			const unsigned            *limited;
			ir_node                   *op = get_irn_n(node, i);

			if (!arch_irn_consider_in_reg_alloc(cls, op))
				continue;

			req = arch_get_register_req(node, i);
			if ((req->type & arch_register_req_type_limited) == 0)
				continue;

			/* TODO: give penalties to neighbors for precolored nodes! */

			limited = req->limited;
			give_penalties_for_limits(&live_nodes, weight * USE_FACTOR, limited,
			                          op);
		}
	}

	ir_nodeset_destroy(&live_nodes);
}

/**
 * Assign register reg to the given node.
 *
 * @param node  the node
 * @param reg   the register
 */
static void use_reg(ir_node *node, const arch_register_t *reg)
{
	unsigned           r          = arch_register_get_index(reg);
	assignment_t      *assignment = &assignments[r];
	allocation_info_t *info;

	assert(assignment->value == NULL);
	assignment->value = node;

	info = get_allocation_info(node);
	info->current_assignment = assignment;

	arch_set_irn_register(node, reg);
}

/**
 * Compare two register preferences in decreasing order.
 */
static int compare_reg_pref(const void *e1, const void *e2)
{
	const reg_pref_t *rp1 = (const reg_pref_t*) e1;
	const reg_pref_t *rp2 = (const reg_pref_t*) e2;
	if (rp1->pref < rp2->pref)
		return 1;
	if (rp1->pref > rp2->pref)
		return -1;
	return 0;
}

static void fill_sort_candidates(reg_pref_t *regprefs,
                                 const allocation_info_t *info)
{
	unsigned r;

	for (r = 0; r < n_regs; ++r) {
		float pref = info->prefs[r];
		if (bitset_is_set(ignore_regs, r)) {
			pref = -10000;
		}
		regprefs[r].num  = r;
		regprefs[r].pref = pref;
	}
	/* TODO: use a stable sort here to avoid unnecessary register jumping */
	qsort(regprefs, n_regs, sizeof(regprefs[0]), compare_reg_pref);
}

/**
 * Determine and assign a register for node @p node
 */
static void assign_reg(const ir_node *block, ir_node *node)
{
	const arch_register_t     *reg;
	allocation_info_t         *info;
	const arch_register_req_t *req;
	reg_pref_t                *reg_prefs;
	ir_node                   *in_node;
	unsigned                  i;

	assert(arch_irn_consider_in_reg_alloc(cls, node));

	/* preassigned register? */
	reg = arch_get_irn_register(node);
	if (reg != NULL) {
		DB((dbg, LEVEL_2, "Preassignment %+F -> %s\n", node, reg->name));
		use_reg(node, reg);
		return;
	}

	/* give a bonus for should_be_same constraints */
	info = get_allocation_info(node);
	req  = arch_get_register_req_out(node);

	in_node = skip_Proj(node);
	if (req->type & arch_register_req_type_should_be_same) {
		float weight = get_block_execfreq(execfreqs, block);
		int   arity  = get_irn_arity(in_node);
		int   i;

		assert(arity <= (int) sizeof(req->other_same) * 8);
		for (i = 0; i < arity; ++i) {
			ir_node               *in;
			const arch_register_t *reg;
			unsigned               r;
			if (!rbitset_is_set(&req->other_same, i))
				continue;

			in  = get_irn_n(in_node, i);
			reg = arch_get_irn_register(in);
			assert(reg != NULL);
			r = arch_register_get_index(reg);
			if (bitset_is_set(ignore_regs, r))
				continue;
			info->prefs[r] += weight * SHOULD_BE_SAME;
		}
	}

	/* TODO: handle must_be_different */

	/* sort the candidate registers by preference and pick the best free one */
	DB((dbg, LEVEL_2, "Candidates for %+F:", node));
	reg_prefs = alloca(n_regs * sizeof(reg_prefs[0]));
	fill_sort_candidates(reg_prefs, info);
	for (i = 0; i < n_regs; ++i) {
		unsigned               num = reg_prefs[i].num;
		const arch_register_t *reg = arch_register_for_index(cls, num);
		DB((dbg, LEVEL_2, " %s(%f)", reg->name, reg_prefs[i].pref));
	}
	DB((dbg, LEVEL_2, "\n"));

	for (i = 0; i < n_regs; ++i) {
		unsigned r = reg_prefs[i].num;
		/* ignores should be last and we should have a non-ignore left */
		assert(!bitset_is_set(ignore_regs, r));
		/* already used? TODO: It might be better to copy the value occupying the register around here, find out when... */
		if (assignments[r].value != NULL)
			continue;
		reg = arch_register_for_index(cls, r);
		DB((dbg, LEVEL_2, "Assign %+F -> %s\n", node, reg->name));
		use_reg(node, reg);
		break;
	}
}

static void free_reg_of_value(ir_node *node)
{
	allocation_info_t *info;
	assignment_t      *assignment;
	unsigned          r;

	if (!arch_irn_consider_in_reg_alloc(cls, node))
		return;

	info       = get_allocation_info(node);
	assignment = info->current_assignment;

	assert(assignment != NULL);

	r = assignment - assignments;
	DB((dbg, LEVEL_2, "Value %+F ended, freeing %s\n",
		node, arch_register_for_index(cls, r)->name));
	assignment->value        = NULL;
	info->current_assignment = NULL;
}

/**
 * Return the index of the currently assigned register of a node.
 */
static unsigned get_current_reg(ir_node *node)
{
	allocation_info_t *info       = get_allocation_info(node);
	assignment_t      *assignment = info->current_assignment;
	return assignment - assignments;
}

/**
 * Return the current assignment of a node.
 */
static assignment_t *get_current_assignment(ir_node *node)
{
	allocation_info_t *info = get_allocation_info(node);
	return info->current_assignment;
}

/**
 * Add a permutation in front of a node and change the assignments
 * due to this permutation.
 *
 * To understand this imagine a permutation like this:
 *
 * 1 -> 2
 * 2 -> 3
 * 3 -> 1, 5
 * 4 -> 6
 * 5
 * 6
 * 7 -> 7
 *
 * First we count how many destinations a single value has. At the same time
 * we can be sure that each destination register has at most 1 source register
 * (it can have 0, which means we don't care what value is in it).
 * We ignore all fulfilled permutations (like 7->7).
 * In a first pass we create as many copy instructions as possible, as they
 * are generally cheaper than exchanges. We do this by counting into how many
 * destinations a register has to be copied (in the example it's 2 for register
 * 3, or 1 for the registers 1, 2, 4 and 7).
 * We can then create a copy into every destination register whose use count
 * is 0 (= no one else needs the value in that register).
 *
 * After this step only cycles should be left. We implement a cyclic
 * permutation of n registers with n-1 transpositions.
 *
 * @param live_nodes   the set of live nodes, updated due to live range split
 * @param before       the node before we add the permutation
 * @param permutation  the permutation array indices are the destination
 *                     registers, the values in the array are the source
 *                     registers.
 */
static void permutate_values(ir_nodeset_t *live_nodes, ir_node *before,
                             unsigned *permutation)
{
	ir_node   *block;
	ir_node  **ins    = ALLOCANZ(ir_node*, n_regs);
	unsigned  *n_used = ALLOCANZ(unsigned, n_regs);
	unsigned   r;

	/* create a list of permutations. Leave out fix points. */
	for (r = 0; r < n_regs; ++r) {
		unsigned      old_reg = permutation[r];
		assignment_t *assignment;
		ir_node      *value;

		/* no need to do anything for a fixpoint */
		if (old_reg == r)
			continue;

		assignment = &assignments[old_reg];
		value      = assignment->value;
		if (value == NULL) {
			/* nothing to do here, reg is not live. Mark it as fixpoint
			 * so we ignore it in the next steps */
			permutation[r] = r;
			continue;
		}

		ins[old_reg] = value;
		++n_used[old_reg];

		/* free occupation infos, we'll add the values back later */
		if (live_nodes != NULL) {
			free_reg_of_value(value);
			ir_nodeset_remove(live_nodes, value);
		}
	}

	block = get_nodes_block(before);

	/* step1: create copies where immediately possible */
	for (r = 0; r < n_regs; /* empty */) {
		ir_node *copy;
		ir_node *src;
		const arch_register_t *reg;
		unsigned               old_r = permutation[r];

		/* - no need to do anything for fixed points.
		   - we can't copy if the value in the dest reg is still needed */
		if (old_r == r || n_used[r] > 0) {
			++r;
			continue;
		}

		/* create a copy */
		src = ins[old_r];
		copy = be_new_Copy(cls, block, src);
		reg = arch_register_for_index(cls, r);
		DB((dbg, LEVEL_2, "Copy %+F (from %+F) -> %s\n", copy, src, reg->name));
		link_to(copy, src);
		use_reg(copy, reg);
		sched_add_before(before, copy);

		/* old register has 1 user less, permutation is resolved */
		assert(arch_register_get_index(arch_get_irn_register(src)) == old_r);
		assert(n_used[old_r] > 0);
		--n_used[old_r];
		permutation[r] = r;

		/* advance or jump back (this copy could have enabled another copy) */
		if (old_r < r && n_used[old_r] == 0) {
			r = old_r;
		} else {
			++r;
		}
	}

	/* at this point we only have "cycles" left, which we have to resolve with
	 * perm instructions.
	 * TODO: if we have free registers left, then we should really use copy
	 * instructions for any cycle longer than 2 registers...
	 * (this is probably architecture dependent; there might be archs where
	 *  copies are preferable even for cycles of length 2)
	 */

	/* create perms with the rest */
	for (r = 0; r < n_regs; /* empty */) {
		const arch_register_t *reg;
		unsigned  old_r = permutation[r];
		unsigned  r2;
		ir_node  *in[2];
		ir_node  *perm;
		ir_node  *proj0;
		ir_node  *proj1;

		if (old_r == r) {
			++r;
			continue;
		}

		/* we shouldn't have copies from 1 value to multiple destinations left */
		assert(n_used[old_r] == 1);

		/* exchange old_r and r2; after that old_r is a fixed point */
		r2 = permutation[old_r];

		in[0] = ins[r2];
		in[1] = ins[old_r];
		perm = be_new_Perm(cls, block, 2, in);

		proj0 = new_r_Proj(block, perm, get_irn_mode(in[0]), 0);
		link_to(proj0, in[0]);
		reg = arch_register_for_index(cls, old_r);
		use_reg(proj0, reg);

		proj1 = new_r_Proj(block, perm, get_irn_mode(in[1]), 1);

		/* 1 value is now in the correct register */
		permutation[old_r] = old_r;
		/* the source of r changed to r2 */
		permutation[r] = r2;
		ins[r2] = in[1];
		reg = arch_register_for_index(cls, r2);
		if (r == r2) {
			/* if we have reached a fixpoint update data structures */
			link_to(proj1, in[1]);
			use_reg(proj1, reg);
		} else {
			arch_set_irn_register(proj1, reg);
		}
	}

#ifdef DEBUG_libfirm
	/* now we should only have fixpoints left */
	for (r = 0; r < n_regs; ++r) {
		assert(permutation[r] == r);
	}
#endif
}
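
/* The copy/swap resolution above can be hard to follow in graph-node form.
 * Below is a small stand-alone sketch (not compiled into the allocator, the
 * toy_* names are made up) that performs the same resolution on a plain
 * permutation array and prints the copies and swaps it would emit for the
 * example permutation from the comment above. */
#if 0
#include <stdio.h>

#define TOY_N_REGS 8

/* resolve "dst <- permutation[dst]" with copies first, then swaps, the same
 * way permutate_values() materializes Copy and Perm nodes */
static void toy_resolve(unsigned *permutation)
{
	unsigned n_used[TOY_N_REGS] = { 0 };
	unsigned r;

	for (r = 0; r < TOY_N_REGS; ++r) {
		if (permutation[r] != r)
			++n_used[permutation[r]];
	}

	/* step 1: plain copies into registers whose old value is not needed */
	for (r = 0; r < TOY_N_REGS; /* empty */) {
		unsigned old_r = permutation[r];
		if (old_r == r || n_used[r] > 0) {
			++r;
			continue;
		}
		printf("copy r%u -> r%u\n", old_r, r);
		--n_used[old_r];
		permutation[r] = r;
		/* a copy may have freed its source register: jump back */
		r = (old_r < r && n_used[old_r] == 0) ? old_r : r + 1;
	}

	/* step 2: only cycles remain; a cycle of n registers needs n-1 swaps */
	for (r = 0; r < TOY_N_REGS; /* empty */) {
		unsigned old_r = permutation[r];
		unsigned r2;
		if (old_r == r) {
			++r;
			continue;
		}
		r2 = permutation[old_r];
		printf("swap r%u <-> r%u\n", old_r, r2);
		permutation[old_r] = old_r;
		permutation[r]     = r2;
	}
}

int main(void)
{
	/* the permutation from the comment above, shifted to 0-based registers:
	 * r0 <- r2, r1 <- r0, r2 <- r1 (a 3-cycle), r4 <- r2, r5 <- r3 (copies) */
	unsigned permutation[TOY_N_REGS] = { 2, 0, 1, 3, 2, 3, 6, 7 };
	toy_resolve(permutation);
	/* prints: copy r2 -> r4, copy r3 -> r5, swap r2 <-> r1, swap r1 <-> r0 */
	return 0;
}
#endif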

/**
 * Free regs for values last used.
 *
 * @param live_nodes   set of live nodes, will be updated
 * @param node         the node to consider
 */
static void free_last_uses(ir_nodeset_t *live_nodes, ir_node *node)
{
	allocation_info_t *info  = get_allocation_info(node);
	int                arity = get_irn_arity(node);
	int                i;
	for (i = 0; i < arity; ++i) {
		ir_node *op;

		/* check if one operand is the last use */
		if (!rbitset_is_set(&info->last_uses, i))
			continue;

		op = get_irn_n(node, i);
		free_reg_of_value(op);
		ir_nodeset_remove(live_nodes, op);
	}
}

/**
 * Enforce constraints at a node by live range splits.
 *
 * @param live_nodes  the set of live nodes, might be changed
 * @param node        the current node
 */
static void enforce_constraints(ir_nodeset_t *live_nodes, ir_node *node)
{
	int arity = get_irn_arity(node);
	int i, dummy, res;
	hungarian_problem_t *bp;
	unsigned l, r, p;
	unsigned *assignment;

	/* see if any use constraints are not met */
	bool good = true;
	for (i = 0; i < arity; ++i) {
		ir_node                   *op = get_irn_n(node, i);
		const arch_register_req_t *req;
		const unsigned            *limited;
		unsigned                  r;

		if (!arch_irn_consider_in_reg_alloc(cls, op))
			continue;

		/* are there any limitations for the i'th operand? */
		req = arch_get_register_req(node, i);
		if ((req->type & arch_register_req_type_limited) == 0)
			continue;

		limited = req->limited;
		r       = get_current_reg(op);
		if (!rbitset_is_set(limited, r)) {
			/* found an assignment outside the limited set */
			good = false;
			break;
		}
	}

	if (good)
		return;

	/* swap values around */
	bp = hungarian_new(n_regs, n_regs, HUNGARIAN_MATCH_PERFECT);

	/* add all combinations, then remove not allowed ones */
	for (l = 0; l < n_regs; ++l) {
		if (bitset_is_set(ignore_regs, l)) {
			hungarian_add(bp, l, l, 90);
			continue;
		}

		for (r = 0; r < n_regs; ++r) {
			if (bitset_is_set(ignore_regs, r))
				continue;

			hungarian_add(bp, l, r, l == r ? 90 : 89);
		}
	}

	for (i = 0; i < arity; ++i) {
		ir_node                   *op = get_irn_n(node, i);
		const arch_register_req_t *req;
		const unsigned            *limited;
		unsigned                  current_reg;

		if (!arch_irn_consider_in_reg_alloc(cls, op))
			continue;

		req = arch_get_register_req(node, i);
		if ((req->type & arch_register_req_type_limited) == 0)
			continue;

		limited     = req->limited;
		current_reg = get_current_reg(op);
		for (r = 0; r < n_regs; ++r) {
			if (rbitset_is_set(limited, r))
				continue;
			hungarian_remv(bp, current_reg, r);
		}
	}

	hungarian_print_costmatrix(bp, 1);
	hungarian_prepare_cost_matrix(bp, HUNGARIAN_MODE_MAXIMIZE_UTIL);

	assignment = ALLOCAN(unsigned, n_regs);
	res = hungarian_solve(bp, (int*) assignment, &dummy, 0);
	assert(res == 0);

	printf("Swap result:");
	for (p = 0; p < n_regs; ++p) {
		printf(" %d", assignment[p]);
	}
	printf("\n");

	hungarian_free(bp);

	permutate_values(live_nodes, node, assignment);
}
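
/* A minimal sketch (not compiled, the scenario is made up) of how the
 * bipartite matching above is set up: keeping a value where it is scores 90,
 * moving it scores 89, and combinations that violate a limited requirement
 * are removed before solving, so the perfect matching is a valid register
 * permutation.  The calls mirror those used in enforce_constraints(). */
#if 0
#include <assert.h>
#include <stdio.h>
#include "hungarian.h"

static void toy_constraint_matching(void)
{
	hungarian_problem_t *bp = hungarian_new(3, 3, HUNGARIAN_MATCH_PERFECT);
	unsigned             assignment[3];
	int                  dummy;
	int                  res;
	unsigned             l, r;

	/* keeping a value in its register is slightly better than moving it */
	for (l = 0; l < 3; ++l) {
		for (r = 0; r < 3; ++r)
			hungarian_add(bp, l, r, l == r ? 90 : 89);
	}

	/* the value currently in r0 is limited to {r2}: drop r0->r0 and r0->r1 */
	hungarian_remv(bp, 0, 0);
	hungarian_remv(bp, 0, 1);

	hungarian_prepare_cost_matrix(bp, HUNGARIAN_MODE_MAXIMIZE_UTIL);
	res = hungarian_solve(bp, (int*) assignment, &dummy, 0);
	assert(res == 0);
	hungarian_free(bp);

	/* read the result the same way permutate_values() does: destination
	 * register r receives the value from register assignment[r] */
	for (r = 0; r < 3; ++r)
		printf("r%u <- r%u\n", r, assignment[r]);
}
#endif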

/** test whether a node @p n is a copy of the value of node @p of */
static int is_copy_of(ir_node *n, ir_node *of)
{
	allocation_info_t *of_info;

	if (n == NULL)
		return 0;

	if (n == of)
		return 1;

	of_info = get_allocation_info(of);
	if (!irn_visited(n))
		return 0;

	return of_info == get_irn_link(n);
}

/** find a value in the end-assignment of a basic block
 * @returns the index into the assignment array if found
 *          -1 if not found
 */
static int find_value_in_block_info(block_info_t *info, ir_node *value)
{
	unsigned      r;
	assignment_t *assignments = info->assignments;
	for (r = 0; r < n_regs; ++r) {
		const assignment_t *assignment = &assignments[r];
		if (is_copy_of(assignment->value, value))
			return (int) r;
	}

	return -1;
}

/**
 * Create the necessary permutations at the end of a basic block to fulfill
 * the register assignment for phi-nodes in the next block
 */
static void add_phi_permutations(ir_node *block, int p)
{
	unsigned  r;
	unsigned *permutation;
	assignment_t *old_assignments;
	int       need_permutation;
	ir_node  *node;
	ir_node  *pred = get_Block_cfgpred_block(block, p);

	block_info_t *pred_info = get_block_info(pred);

	/* predecessor not processed yet? nothing to do */
	if (!pred_info->processed)
		return;

	permutation = ALLOCAN(unsigned, n_regs);
	for (r = 0; r < n_regs; ++r) {
		permutation[r] = r;
	}

	/* check phi nodes */
	need_permutation = 0;
	node = sched_first(block);
	for ( ; is_Phi(node); node = sched_next(node)) {
		const arch_register_t *reg;
		int                    regn;
		int                    a;
		ir_node               *op;

		if (!arch_irn_consider_in_reg_alloc(cls, node))
			continue;

		op = get_Phi_pred(node, p);
		a = find_value_in_block_info(pred_info, op);
		assert(a >= 0);

		reg = arch_get_irn_register(node);
		regn = arch_register_get_index(reg);
		if (regn != a) {
			permutation[regn] = a;
			need_permutation = 1;
		}
	}

	old_assignments = assignments;
	assignments     = pred_info->assignments;
	permutate_values(NULL, be_get_end_of_block_insertion_point(pred),
	                 permutation);
	assignments     = old_assignments;

	node = sched_first(block);
	for ( ; is_Phi(node); node = sched_next(node)) {
		int                    a;
		ir_node               *op;

		if (!arch_irn_consider_in_reg_alloc(cls, node))
			continue;

		op = get_Phi_pred(node, p);
		/* TODO: optimize */
		a = find_value_in_block_info(pred_info, op);
		assert(a >= 0);

		op = pred_info->assignments[a].value;
		set_Phi_pred(node, p, op);
	}
}

938
939
940
941
/**
 * Walker: assign registers to all nodes of a block that
 * need registers from the currently considered register class.
 */
static void allocate_coalesce_block(ir_node *block, void *data)
{
	int                    i;
	unsigned               r;
	ir_nodeset_t           live_nodes;
	ir_nodeset_iterator_t  iter;
	ir_node               *node, *start;
	int                    n_preds;
	block_info_t          *block_info;
	block_info_t         **pred_block_infos;

	(void) data;
	DB((dbg, LEVEL_2, "Allocating in block %+F\n",
		block));

	/* clear assignments */
	block_info  = get_block_info(block);
	assignments = block_info->assignments;

	for (r = 0; r < n_regs; ++r) {
		assignment_t       *assignment = &assignments[r];
		ir_node            *value      = assignment->value;
		allocation_info_t  *info;

		if (value == NULL)
			continue;

		info                     = get_allocation_info(value);
		info->current_assignment = assignment;
	}

	ir_nodeset_init(&live_nodes);

	/* gather regalloc infos of predecessor blocks */
	n_preds = get_Block_n_cfgpreds(block);
	pred_block_infos = ALLOCAN(block_info_t*, n_preds);
	for (i = 0; i < n_preds; ++i) {
		ir_node *pred = get_Block_cfgpred_block(block, i);
		pred_block_infos[i] = get_block_info(pred);
	}

	/* collect live-in nodes and preassigned values */
	be_lv_foreach(lv, block, be_lv_state_in, i) {
		const arch_register_t *reg;

		node = be_lv_get_irn(lv, block, i);
		if (!arch_irn_consider_in_reg_alloc(cls, node))
			continue;

		/* remember that this node is live at the beginning of the block */
		ir_nodeset_insert(&live_nodes, node);

		/* if the node already has a register assigned use it */
		reg = arch_get_irn_register(node);
		if (reg != NULL) {
			/* TODO: consult pred-block infos here. The value could be copied
			   away in some/all predecessor blocks. We need to construct
			   phi-nodes in this case.
			   We even need to construct some Phi_0 like constructs in cases
			   where the predecessor allocation is not determined yet. */
			use_reg(node, reg);
		}
	}

	/* handle phis... */
	node = sched_first(block);
	for ( ; is_Phi(node); node = sched_next(node)) {
		const arch_register_t *reg;

		if (!arch_irn_consider_in_reg_alloc(cls, node))
			continue;

		/* fill in regs already assigned */
		reg = arch_get_irn_register(node);
		if (reg != NULL) {
			use_reg(node, reg);
		} else {
			/* TODO: give boni for registers already assigned at the
			   predecessors */
			assign_reg(block, node);
		}
	}
	start = node;

	/* assign regs for live-in values */
	foreach_ir_nodeset(&live_nodes, node, iter) {
		const arch_register_t *reg;
		reg = arch_get_irn_register(node);
		if (reg != NULL)
			continue;

		assign_reg(block, node);
	}

	/* permutate values at end of predecessor blocks in case of phi-nodes */
	if (n_preds > 1) {
		int p;
		for (p = 0; p < n_preds; ++p) {
			add_phi_permutations(block, p);
		}
	}

	/* assign instructions in the block */
	for (node = start; !sched_is_end(node); node = sched_next(node)) {
		int arity = get_irn_arity(node);
		int i;

		/* enforce use constraints */
		enforce_constraints(&live_nodes, node);

		/* exchange values to copied values where needed */
		for (i = 0; i < arity; ++i) {
			ir_node      *op = get_irn_n(node, i);
			assignment_t *assignment;

			if (!arch_irn_consider_in_reg_alloc(cls, op))
				continue;
			assignment = get_current_assignment(op);
			assert(assignment != NULL);
			if (op != assignment->value) {
				set_irn_n(node, i, assignment->value);
			}
		}

		free_last_uses(&live_nodes, node);

		/* assign output registers */
		/* TODO: 2 phases: first: pre-assigned ones, 2nd real regs */
		if (get_irn_mode(node) == mode_T) {
			const ir_edge_t *edge;
			foreach_out_edge(node, edge) {
				ir_node *proj = get_edge_src_irn(edge);
				if (!arch_irn_consider_in_reg_alloc(cls, proj))
					continue;
				assign_reg(block, proj);
			}
		} else if (arch_irn_consider_in_reg_alloc(cls, node)) {
			assign_reg(block, node);
		}
	}

	ir_nodeset_destroy(&live_nodes);
	assignments = NULL;

	block_info->processed = 1;

	/* if we have exactly 1 successor then we might be able to produce phi
	   copies now */
	if (get_irn_n_edges_kind(block, EDGE_KIND_BLOCK) == 1) {
		const ir_edge_t *edge
			= get_irn_out_edge_first_kind(block, EDGE_KIND_BLOCK);
		ir_node      *succ      = get_edge_src_irn(edge);
		int           p         = get_edge_src_pos(edge);
		block_info_t *succ_info = get_block_info(succ);

		if (succ_info->processed) {
			add_phi_permutations(succ, p);
		}
	}
}

/**
 * Run the register allocator for the current register class.
 */
static void be_straight_alloc_cls(void)
{
	n_regs         = arch_register_class_n_regs(cls);
	lv             = be_assure_liveness(birg);
	be_liveness_assure_sets(lv);
	be_liveness_assure_chk(lv);

	assignments = NULL;

	ir_reserve_resources(irg, IR_RESOURCE_IRN_LINK | IR_RESOURCE_IRN_VISITED);
	inc_irg_visited(irg);

	DB((dbg, LEVEL_2, "=== Allocating registers of %s ===\n", cls->name));

	irg_block_walk_graph(irg, NULL, analyze_block, NULL);
	irg_block_walk_graph(irg, NULL, allocate_coalesce_block, NULL);

	ir_free_resources(irg, IR_RESOURCE_IRN_LINK | IR_RESOURCE_IRN_VISITED);
}

/**
 * Run the spiller on the current graph.
 */
static void spill(void)
{
	/* make sure all nodes show their real register pressure */
	BE_TIMER_PUSH(t_ra_constr);
	be_pre_spill_prepare_constr(birg, cls);
	BE_TIMER_POP(t_ra_constr);

	/* spill */
	BE_TIMER_PUSH(t_ra_spill);
	be_do_spill(birg, cls);
	BE_TIMER_POP(t_ra_spill);

	BE_TIMER_PUSH(t_ra_spill_apply);
	check_for_memory_operands(irg);
	BE_TIMER_POP(t_ra_spill_apply);
}

/**
 * The straight register allocator for a whole procedure.
 */
static void be_straight_alloc(be_irg_t *new_birg)
{
	const arch_env_t *arch_env = new_birg->main_env->arch_env;
	int   n_cls                = arch_env_get_n_reg_class(arch_env);
	int   c;

	obstack_init(&obst);

	birg      = new_birg;
	irg       = be_get_birg_irg(birg);
	execfreqs = birg->exec_freq;

	/* TODO: extract some of the stuff from bechordal allocator, like
	 * statistics, time measurements, etc. and use them here too */

	for (c = 0; c < n_cls; ++c) {
		cls = arch_env_get_reg_class(arch_env, c);
		if (arch_register_class_flags(cls) & arch_register_class_flag_manual_ra)
			continue;

		stat_ev_ctx_push_str("bestraight_cls", cls->name);

		n_regs      = cls->n_regs;
		ignore_regs = bitset_malloc(n_regs);
		be_put_ignore_regs(birg, cls, ignore_regs);

		spill();

		/* verify schedule and register pressure */
		BE_TIMER_PUSH(t_verify);
		if (birg->main_env->options->vrfy_option == BE_CH_VRFY_WARN) {
			be_verify_schedule(birg);
			be_verify_register_pressure(birg, cls, irg);
		} else if (birg->main_env->options->vrfy_option == BE_CH_VRFY_ASSERT) {
			assert(be_verify_schedule(birg) && "Schedule verification failed");
			assert(be_verify_register_pressure(birg, cls, irg)
				&& "Register pressure verification failed");
		}
		BE_TIMER_POP(t_verify);

		BE_TIMER_PUSH(t_ra_color);
		be_straight_alloc_cls();
		BE_TIMER_POP(t_ra_color);

		bitset_free(ignore_regs);

		/* TODO: dump intermediate results */

		stat_ev_ctx_pop("bestraight_cls");
	}

	obstack_free(&obst, NULL);
}

/**
 * Initializes this module.
 */
void be_init_straight_alloc(void)
{
	static be_ra_t be_ra_straight = {
		be_straight_alloc,
	};

	FIRM_DBG_REGISTER(dbg, "firm.be.straightalloc");

	be_register_allocator("straight", &be_ra_straight);
}

BE_REGISTER_MODULE_CONSTRUCTOR(be_init_straight_alloc);