/*
 * This file is part of libFirm.
 * Copyright (C) 2014 University of Karlsruhe.
 */

/**
 * @file
 * @brief   This file implements functions to finalize the irg for emit.
 */
#include "amd64_finish.h"

#include "amd64_new_nodes.h"
#include "amd64_nodes_attr.h"
#include "amd64_transform.h"
#include "bearch.h"
#include "bearch_amd64_t.h"
#include "benode.h"
#include "besched.h"
#include "debug.h"
#include "panic.h"
#include "gen_amd64_new_nodes.h"
#include "gen_amd64_regalloc_if.h"
#include "irgwalk.h"
#include "util.h"
#include "irgmod.h"

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

/**
 * Returns the index of the first "same" register.
 */
Matthias Braun's avatar
Matthias Braun committed
31
static unsigned get_first_same(arch_register_req_t const *const req)
32
{
33
	unsigned const other = req->should_be_same;
Matthias Braun's avatar
Matthias Braun committed
34
	for (unsigned i = 0; i != 32; ++i) {
35
36
37
38
39
40
		if (other & (1U << i))
			return i;
	}
	panic("same position not found");
}

41
42
43
44
45
static bool is_commutative(const ir_node *node)
{
	return arch_get_irn_flags(node) & amd64_arch_irn_flag_commutative_binop;
}

Matthias Braun's avatar
Matthias Braun committed
46
47
48
static bool try_swap_inputs(ir_node *node)
{
	/* commutative operation, just switch the inputs */
49
50
	if (is_commutative(node)) {
		assert(get_amd64_attr_const(node)->op_mode == AMD64_OP_REG_REG);
Matthias Braun's avatar
Matthias Braun committed
51
52
53
54
55
56
57
58
59
60
		/* TODO: support Cmp input swapping */
		ir_node *in0 = get_irn_n(node, 0);
		ir_node *in1 = get_irn_n(node, 1);
		set_irn_n(node, 0, in1);
		set_irn_n(node, 1, in0);
		return true;
	}
	return false;
}

61
62
63
64
65
66
67
68
69
70
71
72
73
static int get_insn_mode_bits(amd64_insn_mode_t insn_mode)
{
	switch (insn_mode) {
	case INSN_MODE_8:       return 8;
	case INSN_MODE_16:      return 16;
	case INSN_MODE_32:      return 32;
	case INSN_MODE_64:      return 64;
	case INSN_MODE_128:     return 128;
	case INSN_MODE_INVALID:
	default:                panic("bad insn mode");
	}
}

/**
 * Transforms a Sub to a Neg + Add, which subsequently allows swapping
 * of the inputs. The swapping is also (implicitly) done here.
 *
 * @param node     the amd64 sub/subs node to rewrite
 * @param out_reg  the register the result must end up in
 */
static void transform_sub_to_neg_add(ir_node *node,
                                     const arch_register_t *out_reg)
{
	ir_node  *block = get_nodes_block(node);
	dbg_info *dbgi  = get_irn_dbg_info(node);

	/* in1 - in2  is rewritten as  (-in2) + in1 */
	ir_node *in1 = get_irn_n(node, 0);
	ir_node *in2 = get_irn_n(node, 1);

	const arch_register_t *in2_reg = arch_get_irn_register(in2);

	const amd64_binop_addr_attr_t *attr = get_amd64_binop_addr_attr(node);
	ir_node *add, *add_res;

	if (is_amd64_subs(node)) {
		/* SSE has no float negate instruction: negate in2 by XOR-ing its
		 * sign bit with a constant from memory. */
		int bits = get_insn_mode_bits(attr->base.insn_mode);
		ir_tarval *tv = get_mode_one(amd64_mode_xmm);
		tv = tarval_shl_unsigned(tv, bits - 1);
		ir_entity *sign_bit_const = create_float_const_entity(tv);

		amd64_binop_addr_attr_t xor_attr;
		memset(&xor_attr, 0, sizeof(xor_attr));
		xor_attr.base.insn_mode             = INSN_MODE_64;
		xor_attr.base.base.op_mode          = AMD64_OP_REG_ADDR;
		/* address the sign-bit constant RIP-relative */
		init_lconst_addr(&xor_attr.base.addr, sign_bit_const);

		ir_node *xor_in[] = { in2 };
		ir_node *xor = new_bd_amd64_xorp(dbgi, block, ARRAY_SIZE(xor_in),
		                                 xor_in, &xor_attr);
		arch_set_irn_register_reqs_in(xor, amd64_xmm_reqs);
		ir_node *const neg = be_new_Proj(xor, pn_amd64_xorp_res);

		sched_add_before(node, xor);
		/* the negated value reuses in2's register */
		arch_set_irn_register(neg, in2_reg);

		/* note the swapped input order: (-in2) + in1 */
		ir_node *in[] = { neg, in1 };
		add     = new_bd_amd64_adds(dbgi, block, ARRAY_SIZE(in), in, attr);
		add_res = be_new_Proj(add, pn_amd64_adds_res);
	} else {
		assert(is_amd64_sub(node));
		/* integer case: a real neg instruction exists */
		ir_node *neg = new_bd_amd64_neg(dbgi, block, in2, attr->base.insn_mode);
		/* the neg result goes directly into the required output register */
		arch_set_irn_register_out(neg, pn_amd64_neg_res, out_reg);
		sched_add_before(node, neg);
		ir_node *const neg_res = be_new_Proj(neg, pn_amd64_neg_res);

		ir_node *in[] = { neg_res, in1 };
		add     = new_bd_amd64_add(dbgi, block, ARRAY_SIZE(in), in, attr);
		add_res = be_new_Proj(add, pn_amd64_add_res);
	}
	/* the add inherits the sub's input register requirements */
	arch_set_irn_register_reqs_in(add, arch_get_irn_register_reqs_in(node));
	arch_set_irn_register(add_res, out_reg);

	/* exchange the add and the sub */
	sched_replace(node, add);
	exchange(node, add);
}

/**
 * Splits an address-mode (AM) operation back into an explicit load followed
 * by a reg,reg operation. @p node is mutated in place into the reg,reg form.
 *
 * @return the Proj carrying the loaded value (result of the new mov)
 */
static ir_node *amd64_turn_back_am(ir_node *node)
{
	dbg_info          *dbgi  = get_irn_dbg_info(node);
	ir_node           *block = get_nodes_block(node);
	amd64_addr_attr_t *attr  = get_amd64_addr_attr(node);

	/* build the address for the standalone load, remapping the input
	 * indices to the load's own (smaller) input array */
	amd64_addr_t new_addr = attr->addr;
	ir_node *load_in[3];
	int      load_arity = 0;
	if (attr->addr.base_input != NO_INPUT &&
	    attr->addr.base_input != RIP_INPUT) {
		new_addr.base_input = load_arity;
		load_in[load_arity++] = get_irn_n(node, attr->addr.base_input);
	}
	if (attr->addr.index_input != NO_INPUT) {
		new_addr.index_input = load_arity;
		load_in[load_arity++] = get_irn_n(node, attr->addr.index_input);
	}
	/* an AM operation always has a memory input */
	assert(attr->addr.mem_input != NO_INPUT);
	new_addr.mem_input = load_arity;
	load_in[load_arity++] = get_irn_n(node, attr->addr.mem_input);

	ir_node *load = new_bd_amd64_mov_gp(dbgi, block, load_arity, load_in,
	                                    attr->insn_mode, AMD64_OP_ADDR,
	                                    new_addr);
	arch_set_irn_register_reqs_in(load, gp_am_reqs[load_arity - 1]);
	ir_node *const load_res = be_new_Proj(load, pn_amd64_mov_gp_res);

	/* change operation */
	const amd64_binop_addr_attr_t *binop_attr
		= (const amd64_binop_addr_attr_t*)attr;
	ir_node *new_in[2];
	new_in[0] = get_irn_n(node, binop_attr->u.reg_input);
	new_in[1] = load_res;
	set_irn_in(node, ARRAY_SIZE(new_in), new_in);
	/* node is now a plain reg,reg operation; clear the address inputs */
	attr->base.op_mode     = AMD64_OP_REG_REG;
	attr->addr.base_input  = NO_INPUT;
	attr->addr.index_input = NO_INPUT;

	/* rewire mem-proj: memory users must now depend on the load */
	foreach_out_edge(node, edge) {
		ir_node *out = get_edge_src_irn(edge);
		if (get_irn_mode(out) == mode_M) {
			set_Proj_pred(out, load);
			set_Proj_num(out, pn_amd64_mov_gp_M);
			/* at most one mem Proj is expected here — TODO confirm */
			break;
		}
	}

	if (sched_is_scheduled(node))
		sched_add_before(node, load);
	return load_res;
}

/**
 * Insert copies for all amd64 nodes where the should_be_same requirement is
 * not fulfilled.
 *
 * For each output with a should_be_same constraint whose tied input sits in a
 * different register: if another input occupies the output register, first try
 * to swap inputs (commutative ops), rewrite sub -> neg+add, or pull the
 * address-mode load out of the node; otherwise insert a Copy of the tied
 * input into the output register.
 */
static void assure_should_be_same_requirements(ir_node *const node)
{
	/* Check all OUT requirements, if there is a should_be_same. */
	be_foreach_out(node, i) {
		arch_register_req_t const *const req
			= arch_get_irn_register_req_out(node, i);
		if (req->should_be_same == 0)
			continue;
		unsigned               const same_pos = get_first_same(req);
		ir_node               *const in_node  = get_irn_n(node, same_pos);
		arch_register_t const *const in_reg   = arch_get_irn_register(in_node);
		arch_register_t const *const out_reg
			= arch_get_irn_register_out(node, i);
		/* constraint already satisfied */
		if (in_reg == out_reg)
			continue;

		/* test if any other input is using the out register */
		foreach_irn_in(node, i2, in) {
			arch_register_t const *const reg = arch_get_irn_register(in);
			if (reg == out_reg && (unsigned)i2 != same_pos) {
				/* a simple Copy would clobber that other input, so the
				 * node itself has to be rewritten */
				if (!is_amd64_irn(node))
					panic("cannot fulfill should_be_same on non-amd64 node");
				/* see what role this register has */
				const amd64_attr_t *attr = get_amd64_attr_const(node);
				if (attr->op_mode == AMD64_OP_ADDR
				 || attr->op_mode == AMD64_OP_REG
				 || attr->op_mode == AMD64_OP_REG_IMM) {
					/* these forms have no second register input that
					 * could conflict */
					panic("unexpected op_mode");
				} else if (attr->op_mode == AMD64_OP_REG_REG) {
swap:;
					bool res = try_swap_inputs(node);
					if (res)
						return;

					/* sub is not commutative, but neg+add is swappable */
					if (is_amd64_sub(node) || is_amd64_subs(node)) {
						transform_sub_to_neg_add(node, out_reg);
						return;
					}
					panic("couldn't swap inputs of %+F", node);
				} else {
					assert(attr->op_mode == AMD64_OP_REG_ADDR);
					/* extract load into an own instruction */
					ir_node *res = amd64_turn_back_am(node);
					arch_set_irn_register(res, out_reg);
					/* node is now REG_REG; retry the swap path */
					goto swap;
				}
			}
		}

		/* no conflict: satisfy the constraint with a plain Copy */
		ir_node *const block = get_nodes_block(node);
		ir_node *const copy  = be_new_Copy(block, in_node);

		/* Destination is the out register. */
		arch_set_irn_register(copy, out_reg);
		/* Insert copy before the node into the schedule. */
		sched_add_before(node, copy);
		/* Set copy as in. */
		set_irn_n(node, same_pos, copy);

		DBG((dbg, LEVEL_1, "created copy %+F for should be same argument at input %d of %+F\n", copy, same_pos, node));
	}
}

/**
 * Block walker: finishes a block by repairing unmet should_be_same
 * constraints on every amd64 node scheduled in it.
 */
static void amd64_finish_irg_walker(ir_node *const block, void *const env)
{
	(void)env;

	/* Insert copies for should_be_same constraints. The safe iterator is
	 * required because nodes may be replaced while walking. */
	sched_foreach_safe(block, irn) {
		if (!is_amd64_irn(irn))
			continue;
		assure_should_be_same_requirements(irn);
	}
}

/**
 * Add Copy nodes for not fulfilled should_be_same constraints.
 *
 * @param irg  the graph to finish for emission
 */
void amd64_finish_irg(ir_graph *const irg)
{
	/* NULL instead of 0: these are pointer arguments (no pre-walker and
	 * no walker environment are needed). */
	irg_block_walk_graph(irg, NULL, amd64_finish_irg_walker, NULL);
}

/** Registers the debug module used by this finish phase. */
void amd64_init_finish(void)
{
	FIRM_DBG_REGISTER(dbg, "firm.be.amd64.finish");
}