/*
 * This file is part of libFirm.
 * Copyright (C) 2012 University of Karlsruhe.
 */

/**
 * @file
 * @brief   code selection (transform FIRM into amd64 FIRM)
 */
Matthias Braun's avatar
Matthias Braun committed
10
#include "debug.h"
Matthias Braun's avatar
Matthias Braun committed
11
#include "panic.h"
Matthias Braun's avatar
Matthias Braun committed
12
13
14
#include "heights.h"
#include "ircons.h"
#include "irgmod.h"
15
16
#include "irgraph_t.h"
#include "irmode_t.h"
Matthias Braun's avatar
Matthias Braun committed
17
#include "irnode_t.h"
18
#include "iropt_t.h"
19
#include "tv_t.h"
Matthias Braun's avatar
Matthias Braun committed
20
#include "util.h"
21

22
23
#include "benode.h"
#include "betranshlp.h"
24
#include "bearch_amd64_t.h"
25
#include "beirg.h"
Matthias Braun's avatar
Matthias Braun committed
26
#include "besched.h"
27

Matthias Braun's avatar
Matthias Braun committed
28
#include "amd64_new_nodes.h"
29
30
#include "amd64_nodes_attr.h"
#include "amd64_transform.h"
Matthias Braun's avatar
Matthias Braun committed
31
#include "../ia32/x86_address_mode.h"
32
#include "../ia32/x86_cconv.h"
33
34
35
36
37

#include "gen_amd64_regalloc_if.h"

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

38
static ir_mode         *mode_gp;
Matthias Braun's avatar
Matthias Braun committed
39
static ir_mode         *mode_flags;
40
static x86_cconv_t     *current_cconv = NULL;
41
static be_start_info_t  start_mem;
42
static be_start_info_t  start_val[N_AMD64_REGISTERS];
43
44
45
static size_t           start_params_offset;
static pmap            *node_to_stack;
static be_stackorder_t *stackorder;
46

47
48
49
/** we don't have a concept of aliasing registers, so enumerate them
 * manually for the asm nodes. */
const x86_clobber_name_t amd64_additional_clobber_names[] = {
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
	{ "al", REG_RAX }, { "ah", REG_RAX }, { "ax", REG_RAX }, { "eax", REG_RAX },
	{ "bl", REG_RBX }, { "bh", REG_RBX }, { "bx", REG_RBX }, { "ebx", REG_RBX },
	{ "cl", REG_RCX }, { "ch", REG_RCX }, { "cx", REG_RCX }, { "ecx", REG_RCX },
	{ "dl", REG_RDX }, { "dh", REG_RDX }, { "dx", REG_RDX }, { "edx", REG_RDX },
	{ "sil",  REG_RSI }, { "si",   REG_RSI }, { "esi",  REG_RSI },
	{ "dil",  REG_RDI }, { "di",   REG_RDI }, { "edi",  REG_RDI },
	{ "bpl",  REG_RBP }, { "bp",   REG_RBP }, { "ebp",  REG_RBP },
	{ "spl",  REG_RSP }, { "sp",   REG_RSP }, { "esp",  REG_RSP },
	{ "r8b",  REG_R8  }, { "r8w",  REG_R8  }, { "r8d",  REG_R8  },
	{ "r9b",  REG_R9  }, { "r9w",  REG_R9  }, { "r9d",  REG_R9  },
	{ "r10b", REG_R10 }, { "r10w", REG_R10 }, { "r10d", REG_R10 },
	{ "r11b", REG_R11 }, { "r11w", REG_R11 }, { "r11d", REG_R11 },
	{ "r12b", REG_R12 }, { "r12w", REG_R12 }, { "r12d", REG_R12 },
	{ "r13b", REG_R13 }, { "r13w", REG_R13 }, { "r13d", REG_R13 },
	{ "r14b", REG_R14 }, { "r14w", REG_R14 }, { "r14d", REG_R14 },
	{ "r15b", REG_R15 }, { "r15w", REG_R15 }, { "r15d", REG_R15 },
66
67
68
69
	{ NULL, ~0u }
};

#define GP &amd64_reg_classes[CLASS_amd64_gp]
70
const x86_asm_constraint_list_t amd64_asm_constraints = {
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
	['A'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RDX },
	['D'] = { MATCH_REG, GP, 1 << REG_GP_RDI },
	['I'] = { MATCH_IMM, GP, 0 },
	['J'] = { MATCH_IMM, GP, 0 },
	['K'] = { MATCH_IMM, GP, 0 },
	['L'] = { MATCH_IMM, GP, 0 },
	['M'] = { MATCH_IMM, GP, 0 },
	['N'] = { MATCH_IMM, GP, 0 },
	['O'] = { MATCH_IMM, GP, 0 },
	['R'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP | 1 << REG_GP_RSP },
	['S'] = { MATCH_REG, GP, 1 << REG_GP_RSI },
	['Q'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX },
	['V'] = { MATCH_MEM, GP, 0 },
	['X'] = { MATCH_ANY, GP, 0 },
	['a'] = { MATCH_REG, GP, 1 << REG_GP_RAX },
	['b'] = { MATCH_REG, GP, 1 << REG_GP_RBX },
	['c'] = { MATCH_REG, GP, 1 << REG_GP_RCX },
	['d'] = { MATCH_REG, GP, 1 << REG_GP_RDX },
	['g'] = { MATCH_ANY, GP, 0 },
	['i'] = { MATCH_IMM, GP, 0 },
	['l'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP },
	['m'] = { MATCH_MEM, GP, 0 },
	['n'] = { MATCH_IMM, GP, 0 },
	['o'] = { MATCH_MEM, GP, 0 },
	['p'] = { MATCH_REG, GP, 0 },
	['q'] = { MATCH_REG, GP, 0 },
	['r'] = { MATCH_REG, GP, 0 },
	['x'] = { MATCH_REG, &amd64_reg_classes[CLASS_amd64_xmm], 0 },

	// see comments in ia32_transform.c about unimplemented stuff.
};
#undef GP

Matthias Braun's avatar
Matthias Braun committed
109
static const arch_register_req_t amd64_requirement_gp = {
110
111
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
112
	.type            = arch_register_req_type_none,
113
114
115
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
116
117
118
};

static const arch_register_req_t amd64_requirement_flags = {
119
120
	.cls             = &amd64_reg_classes[CLASS_amd64_flags],
	.limited         = NULL,
121
	.type            = arch_register_req_type_none,
122
123
124
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
125
126
127
};

static const arch_register_req_t amd64_requirement_xmm = {
128
129
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
130
	.type            = arch_register_req_type_none,
131
132
133
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
Matthias Braun's avatar
Matthias Braun committed
134
135
};

Matthias Braun's avatar
Matthias Braun committed
136
137
138
#define BIT(x)    (1u << x)

static const arch_register_req_t amd64_requirement_gp_same_0 = {
139
140
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
141
	.type            = arch_register_req_type_should_be_same,
142
143
144
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
Matthias Braun's avatar
Matthias Braun committed
145
146
};

147
static const arch_register_req_t amd64_requirement_xmm_same_0 = {
148
149
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
150
	.type            = arch_register_req_type_should_be_same,
151
152
153
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
154
155
};

Matthias Braun's avatar
Matthias Braun committed
156
static const arch_register_req_t amd64_requirement_gp_same_0_not_1 = {
157
158
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
159
	.type            = arch_register_req_type_should_be_same
160
161
162
163
	                   | arch_register_req_type_must_be_different,
	.other_same      = BIT(0),
	.other_different = BIT(1),
	.width           = 1,
Matthias Braun's avatar
Matthias Braun committed
164
165
166
167
};

static const unsigned amd64_limited_gp_rcx [] = { BIT(REG_GP_RCX) };
static const arch_register_req_t amd64_requirement_rcx = {
168
169
170
171
172
173
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rcx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
Matthias Braun's avatar
Matthias Braun committed
174
175
};

176
177
static const unsigned amd64_limited_gp_rax [] = { BIT(REG_GP_RAX) };
static const arch_register_req_t amd64_requirement_rax = {
178
179
180
181
182
183
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rax,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
184
185
};

186
187
static const unsigned amd64_limited_gp_rdx [] = { BIT(REG_GP_RDX) };
static const arch_register_req_t amd64_requirement_rdx = {
188
189
190
191
192
193
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rdx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
194
195
};

Tobias Rapp's avatar
Tobias Rapp committed
196
197
198
199
200
201
202
203
204
205
/* Exactly the stack pointer register rsp. */
static const unsigned amd64_limited_gp_rsp [] = { BIT(REG_GP_RSP) };
static const arch_register_req_t amd64_requirement_rsp = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rsp,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

Matthias Braun's avatar
Matthias Braun committed
206
207
208
209
210
211
/* input requirements: [mem] */
static const arch_register_req_t *mem_reqs[] = {
	&arch_no_requirement,
};

static const arch_register_req_t *reg_mem_reqs[] = {
	&amd64_requirement_gp,
Matthias Braun's avatar
Matthias Braun committed
212
213
214
	&arch_no_requirement,
};

Tobias Rapp's avatar
Tobias Rapp committed
215
216
217
218
219
220
221
222
223
224
225
/* input requirements: [rsp, mem] */
static const arch_register_req_t *rsp_mem_reqs[] = {
	&amd64_requirement_rsp,
	&arch_no_requirement,
};

/* input requirements: [rsp, gp, mem] */
static const arch_register_req_t *rsp_reg_mem_reqs[] = {
	&amd64_requirement_rsp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

226
227
228
229
230
/* input requirements: [xmm, mem] */
static const arch_register_req_t *xmm_mem_reqs[] = {
	&amd64_requirement_xmm,
	&arch_no_requirement,
};

Matthias Braun's avatar
Matthias Braun committed
231
232
static const arch_register_req_t *reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
Matthias Braun's avatar
Matthias Braun committed
233
234
235
236
	&amd64_requirement_gp,
	&arch_no_requirement,
};

237
238
239
240
241
242
/* input requirements: [xmm, gp, mem] */
static const arch_register_req_t *xmm_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

Matthias Braun's avatar
Matthias Braun committed
243
244
static const arch_register_req_t *reg_reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
Matthias Braun's avatar
Matthias Braun committed
245
246
247
248
249
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

250
251
252
253
254
255
256
/* input requirements: [xmm, gp, gp, mem] */
static const arch_register_req_t *xmm_reg_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

Tobias Rapp's avatar
Tobias Rapp committed
257
static const arch_register_req_t *reg_flags_reqs[] = {
258
259
260
261
	&amd64_requirement_gp,
	&amd64_requirement_flags,
};

Matthias Braun's avatar
Matthias Braun committed
262
263
/* input requirements: [gp, gp] */
static const arch_register_req_t *reg_reg_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
};

267
268
269
270
271
/* input requirements: [rax, gp] */
static const arch_register_req_t *rax_reg_reqs[] = {
	&amd64_requirement_rax,
	&amd64_requirement_gp,
};

272
static const arch_register_req_t *rax_reg_rdx_mem_reqs[] = {
273
274
275
	&amd64_requirement_rax,
	&amd64_requirement_gp,
	&amd64_requirement_rdx,
276
	&arch_no_requirement,
277
278
};

Matthias Braun's avatar
Matthias Braun committed
279
280
281
282
/* input requirements: [gp] */
static const arch_register_req_t *reg_reqs[] = {
	&amd64_requirement_gp,
};

283
arch_register_req_t const *amd64_xmm_reqs[] = {
284
285
286
	&amd64_requirement_xmm,
};

Matthias Braun's avatar
Matthias Braun committed
287
288
289
290
291
/* input requirements: [gp, rcx] (variable shift amount in cl) */
static const arch_register_req_t *reg_rcx_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_rcx,
};

292
293
294
/* input requirements: none (nullary nodes) */
static const arch_register_req_t *no_reqs[] = {
};

295
296
297
298
299
/* input requirements: [xmm, xmm] */
static const arch_register_req_t *xmm_xmm_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_xmm,
};

Matthias Braun's avatar
Matthias Braun committed
300
301
/** Returns true if values of @p mode live in a general-purpose register:
 * two's-complement integer modes, except the 128-bit vector mode. */
static inline bool mode_needs_gp_reg(ir_mode *mode)
{
	return get_mode_arithmetic(mode) == irma_twos_complement
	    && mode != amd64_mode_xmm; /* mode_xmm is 128bit int at the moment */
}

/** Is @p node a Conv between gp-register integer modes that does not
 * widen the value? */
static bool is_downconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;

	ir_mode *const to   = get_irn_mode(node);
	ir_mode *const from = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(to) || !mode_needs_gp_reg(from))
		return false;
	return get_mode_size_bits(to) <= get_mode_size_bits(from);
}

/** Skips over single-user down-Convs, returning the first node that is
 * not such a Conv (or has multiple users). */
static ir_node *skip_downconv(ir_node *node)
{
	for (;;) {
		if (!is_downconv(node) || get_irn_n_edges(node) > 1)
			return node;
		node = get_Conv_op(node);
	}
}

/** Is @p node a Conv between gp-register integer modes of equal bit
 * width (a no-op on amd64)? */
static bool is_sameconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;
	ir_mode *const to   = get_irn_mode(node);
	ir_mode *const from = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(to) || !mode_needs_gp_reg(from))
		return false;
	return get_mode_size_bits(to) == get_mode_size_bits(from);
}

/** Skips over single-user same-width Convs, returning the first node
 * that is not such a Conv (or has multiple users). */
static ir_node *skip_sameconv(ir_node *node)
{
	while (is_sameconv(node)) {
		if (get_irn_n_edges(node) > 1)
			break;
		node = get_Conv_op(node);
	}
	return node;
}

353
354
/** Returns the Proj for the initial stack pointer of @p irg. */
static ir_node *get_initial_sp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RSP]);
}

/** Returns the Proj for the initial frame pointer of @p irg. */
static ir_node *get_initial_fp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RBP]);
}

/** Returns the Proj for the initial memory state of @p irg. */
static ir_node *get_initial_mem(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_mem);
}

/** Returns the register used as frame base: the stack pointer when the
 * frame pointer is omitted, the frame pointer otherwise. */
static ir_node *get_frame_base(ir_graph *irg)
{
	return current_cconv->omit_fp ? get_initial_sp(irg)
	                              : get_initial_fp(irg);
}
376

377
378
379
static amd64_insn_mode_t get_insn_mode_from_mode(const ir_mode *mode)
{
	switch (get_mode_size_bits(mode)) {
380
381
382
383
384
	case   8: return INSN_MODE_8;
	case  16: return INSN_MODE_16;
	case  32: return INSN_MODE_32;
	case  64: return INSN_MODE_64;
	case 128: return INSN_MODE_128;
385
386
387
388
	}
	panic("unexpected mode");
}

389
/** Returns a read-only global entity holding the value @p tv, creating
 * and caching it in amd64_constants on first use. */
ir_entity *create_float_const_entity(ir_tarval *const tv)
{
	ir_entity *entity = pmap_get(ir_entity, amd64_constants, tv);
	if (entity != NULL)
		return entity;

	ir_mode *mode = get_tarval_mode(tv);
	ir_type *type = get_type_for_mode(mode);
	ir_type *glob = get_glob_type();

	entity = new_entity(glob, id_unique("C%u"), type);
	set_entity_visibility(entity, ir_visibility_private);
	add_entity_linkage(entity, IR_LINKAGE_CONSTANT);

	ir_initializer_t *initializer = create_initializer_tarval(tv);
	set_entity_initializer(entity, initializer);

	pmap_insert(amd64_constants, tv, entity);
	return entity;
}

410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
typedef enum reference_mode_t {
	REFERENCE_DIRECT,
	REFERENCE_IP_RELATIVE,
	REFERENCE_GOT,
} reference_mode_t;

static reference_mode_t need_relative_addressing(const ir_entity *entity)
{
	if (!be_options.pic)
		return REFERENCE_DIRECT;

	/* simply everything is instruction pointer relative, external functions
	 * use a global offset table */
	return entity_has_definition(entity)
	   && (get_entity_linkage(entity) & IR_LINKAGE_MERGE) == 0
	    ? REFERENCE_IP_RELATIVE : REFERENCE_GOT;
}

428
static ir_node *create_float_const(dbg_info *dbgi, ir_node *block,
Matthias Braun's avatar
Matthias Braun committed
429
                                   ir_tarval *tv)
430
431
432
{
	ir_graph  *irg     = get_irn_irg(block);
	ir_mode   *tv_mode = get_tarval_mode(tv);
433
	ir_entity *entity  = create_float_const_entity(tv);
434
435
436
437
438
439
440
441
442
443
	ir_node   *nomem   = get_irg_no_mem(irg);

	ir_node *in[] = { nomem };
	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	addr.immediate.entity       = entity;
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(tv_mode);

	addr.index_input = NO_INPUT;
444
445
446
447
448
449
	if (need_relative_addressing(entity) == REFERENCE_DIRECT) {
		addr.base_input = NO_INPUT;
	} else {
		assert(need_relative_addressing(entity) == REFERENCE_IP_RELATIVE);
		addr.base_input = RIP_INPUT;
	}
450

451
	ir_node *load;
452
	unsigned pn_res;
453
454
455
	if (insn_mode == INSN_MODE_128) {
		load = new_bd_amd64_movdqa(dbgi, block, ARRAY_SIZE(in), in,
		                           AMD64_OP_ADDR, addr);
456
		pn_res = pn_amd64_movdqa_res;
457
	} else {
458
459
460
		load = new_bd_amd64_movs_xmm(dbgi, block, ARRAY_SIZE(in), in,
		                             insn_mode, AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movs_xmm_res;
461
	}
462
463
464
	arch_set_irn_register_reqs_in(load, mem_reqs);
	set_irn_pinned(load, op_pin_state_floats);

465
	return new_r_Proj(load, amd64_mode_xmm, pn_res);
466
467
468
469
470
}

/** Builds a tarval with only the sign bit of @p mode set (used for
 * float neg/abs bit masks). Supports 32, 64 and 128 bit modes. */
ir_tarval *create_sign_tv(ir_mode *mode)
{
	unsigned size = get_mode_size_bits(mode);
	assert(size == 32 || size == 64 || size == 128);
	ir_mode *intmode = size == 128 ? amd64_mode_xmm
	                 : size == 64  ? mode_Lu
	                               : mode_Iu;
	ir_tarval *one  = get_mode_one(intmode);
	ir_tarval *sign = tarval_shl_unsigned(one, size-1);
	return tarval_bitcast(sign, mode);
}

480
481
/** Transforms a Const node: floats become xorpd-zero or a constant-pool
 * load, integers become a mov-immediate (32-bit encoding when the value
 * fits, 64-bit otherwise). */
static ir_node *gen_Const(ir_node *node)
{
	ir_node   *block = be_transform_nodes_block(node);
	dbg_info  *dbgi  = get_irn_dbg_info(node);
	ir_mode   *mode  = get_irn_mode(node);
	ir_tarval *tv    = get_Const_tarval(node);

	if (!mode_needs_gp_reg(mode)) {
		if (tarval_is_null(tv)) {
			return new_bd_amd64_xorpd_0(dbgi, block);
		}

		return create_float_const(dbgi, block, tv);
	}

	uint64_t val = get_tarval_uint64(tv);
	amd64_insn_mode_t imode = val > UINT32_MAX ? INSN_MODE_64 : INSN_MODE_32;
	return new_bd_amd64_mov_imm(dbgi, block, imode, val, NULL);
}

500
/** Transforms an Address node: a plain mov-immediate without PIC, a
 * RIP-relative lea for locally defined entities, or a load from the GOT
 * for external ones. */
static ir_node *gen_Address(ir_node *node)
{
	ir_node   *block  = be_transform_nodes_block(node);
	dbg_info  *dbgi   = get_irn_dbg_info(node);
	ir_entity *entity = get_Address_entity(node);

	/* do we need RIP-relative addressing because of PIC? */
	reference_mode_t mode = need_relative_addressing(entity);
	if (mode == REFERENCE_DIRECT)
		return new_bd_amd64_mov_imm(dbgi, block, INSN_MODE_64, 0, entity);

	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));
	addr.base_input  = NO_INPUT;
	addr.index_input = NO_INPUT;
	addr.mem_input   = NO_INPUT;

	if (mode == REFERENCE_IP_RELATIVE) {
		addr.base_input       = RIP_INPUT;
		addr.immediate.entity = entity;
		return new_bd_amd64_lea(dbgi, block, 0, NULL, INSN_MODE_64, addr);
	} else {
		assert(mode == REFERENCE_GOT);
		addr.immediate.entity = new_got_entry_entity(entity);
		ir_node *load = new_bd_amd64_mov_gp(dbgi, block, 0, NULL, INSN_MODE_64,
		                                    AMD64_OP_ADDR, addr);
		return new_r_Proj(load, mode_gp, pn_amd64_mov_gp_res);
	}
}

530
531
/** Creates a stack-pointer adjusting IncSP node; marked as modifying the
 * flags because add/sub on rsp clobbers them. */
ir_node *amd64_new_IncSP(ir_node *block, ir_node *old_sp, int offset,
                         unsigned align)
{
	ir_node *incsp = be_new_IncSP(&amd64_registers[REG_RSP], block, old_sp,
	                              offset, align);
	arch_add_irn_flags(incsp, arch_irn_flag_modify_flags);
	return incsp;
}

Matthias Braun's avatar
Matthias Braun committed
539
typedef ir_node *(*construct_binop_func)(dbg_info *dbgi, ir_node *block,
Matthias Braun's avatar
Matthias Braun committed
540
	int arity, ir_node *in[], const amd64_binop_addr_attr_t *attr_init);
Matthias Braun's avatar
Matthias Braun committed
541

542
543
544
545
typedef ir_node *(*construct_rax_binop_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], amd64_insn_mode_t insn_mode,
	amd64_op_mode_t op_mode, amd64_addr_t addr);

Matthias Braun's avatar
Matthias Braun committed
546
547
548
549
550
551
552
553
typedef enum match_flags_t {
	match_am           = 1 << 0,
	match_mode_neutral = 1 << 1,
	match_immediate    = 1 << 2,
	match_commutative  = 1 << 3,
} match_flags_t;

typedef struct amd64_args_t {
Matthias Braun's avatar
Matthias Braun committed
554
555
556
557
	amd64_binop_addr_attr_t     attr;
	ir_node                    *mem_proj;
	ir_node                    *in[4];
	int                         arity;
Matthias Braun's avatar
Matthias Braun committed
558
559
560
	const arch_register_req_t **reqs;
} amd64_args_t;

Matthias Braun's avatar
Matthias Braun committed
561
/** Tries to encode @p op as a 32-bit immediate into @p imm.
 * @param can_match_ip_relative  also accept Address nodes (RIP-relative)
 * @param upper32_dont_care      accept negative values even for unsigned
 *                               modes (sign extension is harmless then)
 * @return true if the immediate was matched */
static bool match_immediate_32(amd64_imm32_t *imm, const ir_node *op,
                               bool can_match_ip_relative,
                               bool upper32_dont_care)
{
	assert(mode_needs_gp_reg(get_irn_mode(op)));
	assert(imm->offset == 0 && imm->entity == NULL);
	if (is_Const(op)) {
		ir_tarval *tv = get_Const_tarval(op);
		if (!tarval_is_long(tv))
			return false;
		long    lval = get_tarval_long(tv);
		int32_t val  = (int32_t)lval;
		if ((long)val != lval)
			return false;
		/** the immediate value is sign extended to 64bit, sometimes
		 * this is not what we want. */
		if (!upper32_dont_care && val < 0
		    && !mode_is_signed(get_tarval_mode(tv)))
			return false;
		imm->offset = val;
		return true;
	} else if (can_match_ip_relative && is_Address(op)) {
		/* TODO: check if entity is in lower 4GB address space/relative */
		ir_entity *entity = get_Address_entity(op);
		imm->entity = entity;
		return true;
	}
	/* TODO: SymConst, Add(SymConst, Const) ... */
	return false;
}

static ir_heights_t *heights;

594
595
596
597
598
599
600
601
602
/** Returns true if @p input (transitively) depends on @p load within the
 * load's block; dependencies in other blocks are ignored because address
 * mode matching only folds loads from the same block. */
static bool input_depends_on_load(ir_node *load, ir_node *input)
{
	ir_node *const load_block = get_nodes_block(load);
	if (get_nodes_block(input) != load_block)
		return false;
	return heights_reachable_in_block(heights, input, load);
}

Tobias Rapp's avatar
Tobias Rapp committed
603
604
605
606
607
608
609
610
611
/** After folding a load into @p node, registers @p node as the
 * transformed result of that load so its memory Proj reroutes correctly.
 * Does nothing when no load was folded (@p mem_proj is NULL). */
static void fix_node_mem_proj(ir_node *node, ir_node *mem_proj)
{
	if (mem_proj != NULL) {
		ir_node *const load = get_Proj_pred(mem_proj);
		be_set_transformed_node(load, node);
	}
}

612
/** Checks whether @p node is the result Proj of a Load in @p block that
 * may be folded as a source address-mode operand.
 * @return the Load node, or NULL if folding is not possible */
static ir_node *source_am_possible(ir_node *block, ir_node *node)
{
	if (!is_Proj(node))
		return NULL;
	ir_node *load = get_Proj_pred(node);
	if (!is_Load(load))
		return NULL;
	assert(get_Proj_num(node) == pn_Load_res);
	if (get_nodes_block(load) != block)
		return NULL;
	/* make sure we are the only user */
	if (get_irn_n_edges(node) != 1)
		return NULL;
	/* ia32 backend claims this can happen, use an assert for now and see
	 * if we hit it :) */
	assert(!be_is_transformed(node));
	return load;
}

/** Returns true if @p op is narrower than 32 bit and its upper bits are
 * not already known to be clean, i.e. an explicit extension is needed. */
static bool needs_extension(ir_node *op)
{
	ir_mode *const mode = get_irn_mode(op);
	return get_mode_size_bits(mode) < 32 && !be_upper_bits_clean(op, mode);
}

639
/** Creates a sign-extension of @p node by arithmetic right-shifting it
 * by (bits-1), producing a value that is all sign bits (used e.g. to
 * build the high part for a signed division). */
static ir_node *create_sext(ir_node *new_block, ir_node *const node, ir_mode *mode)
{
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(mode);
	dbg_info *const   dbgi      = get_irn_dbg_info(node);
	ir_node  *const   new_node  = be_transform_node(node);

	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	attr.base.op_mode = AMD64_OP_SHIFT_IMM;
	attr.insn_mode    = insn_mode;
	attr.immediate    = get_mode_size_bits(mode) - 1;
	ir_node *in[1]    = { new_node };
	ir_node *sar      = new_bd_amd64_sar(dbgi, new_block, ARRAY_SIZE(in),
	                                     in, &attr);

	arch_set_irn_register_reqs_in(sar, reg_reqs);
	arch_set_irn_register_req_out(sar, 0, &amd64_requirement_gp_same_0);
	return new_r_Proj(sar, mode_gp, pn_amd64_sar_res);
}

659
/** Creates a zero value via xor (used as zero-extended high part,
 * e.g. for unsigned division). */
static ir_node *create_zext(ir_node *new_block, ir_node *const node)
{
	dbg_info *const dbgi = get_irn_dbg_info(node);
	ir_node  *const xor0 = new_bd_amd64_xor_0(dbgi, new_block);
	arch_set_irn_register_reqs_in(xor0, reg_reqs);
	return new_r_Proj(xor0, mode_gp, pn_amd64_xor_0_res);
}

667
668
669
670
671
/** Returns true if @p in refers to a real value input (not the
 * NO_INPUT/RIP_INPUT sentinels). */
static bool val_input(unsigned in)
{
	return !(in == NO_INPUT || in == RIP_INPUT);
}

Tobias Rapp's avatar
Tobias Rapp committed
672
673
/** Decides whether a load feeding @p op1/@p op2 can be folded as an
 * address-mode operand. On success *out_load is the foldable Load and
 * *out_op the remaining register operand.
 * 8/16-bit operations are excluded (partial-register issues). */
static bool use_address_matching(ir_mode *mode, match_flags_t flags,
                                 ir_node *block,
                                 ir_node *op1, ir_node *op2,
                                 ir_node **out_load, ir_node **out_op)
{
	if (! (flags & match_am))
		return false;

	unsigned mode_bits = get_mode_size_bits(mode);
	if (mode_bits == 8 || mode_bits == 16)
		return false;

	ir_node *load2 = source_am_possible(block, op2);
	if (load2 != NULL && !input_depends_on_load(load2, op1)) {
		(*out_load) = load2;
		(*out_op)   = op1;
		return true;
	}

	if (flags & match_commutative) {
		ir_node *load1 = source_am_possible(block, op1);
		if (load1 != NULL && !input_depends_on_load(load1, op2)) {
			(*out_load) = load1;
			(*out_op)   = op2;
			return true;
		}
	}
	return false;
}

702
703
/** Decomposes address @p ptr into an amd64 address mode, transforming
 * and appending base/index nodes to @p in and advancing @p arity. */
static void perform_address_matching(ir_node *ptr, int *arity,
                                     ir_node **in, amd64_addr_t *addr)
{
	x86_address_t maddr;
	memset(&maddr, 0, sizeof(maddr));
	x86_create_address_mode(&maddr, ptr, x86_create_am_normal);

	if (maddr.base != NULL) {
		int base_input   = (*arity)++;
		addr->base_input = base_input;
		in[base_input]   = be_transform_node(maddr.base);
	} else {
		/* no explicit base: entities needing PIC go RIP-relative */
		ir_entity *entity = maddr.entity;
		if (entity != NULL
		    && need_relative_addressing(entity) != REFERENCE_DIRECT) {
			addr->base_input = RIP_INPUT;
		} else {
			addr->base_input = NO_INPUT;
		}
	}
	if (maddr.index != NULL) {
		int index_input   = (*arity)++;
		addr->index_input = index_input;
		in[index_input]   = be_transform_node(maddr.index);
	} else {
		addr->index_input = NO_INPUT;
	}
	if (maddr.frame_entity != NULL) {
		assert(maddr.entity == NULL);
		addr->immediate.entity = maddr.frame_entity;
		/* not supported yet */
		assert(!is_parameter_entity(maddr.frame_entity)
		       || get_entity_parameter_number(maddr.frame_entity)
		          != IR_VA_START_PARAMETER_NUMBER);
	} else {
		addr->immediate.entity = maddr.entity;
	}
	addr->immediate.offset = maddr.offset;
	addr->log_scale        = maddr.scale;
}

Matthias Braun's avatar
Matthias Braun committed
743
744
745
/** Matches the operands of a binary operation into @p args, trying in
 * order: a 32-bit immediate for op2, a folded load (address mode), and
 * finally plain register-register form. */
static void match_binop(amd64_args_t *args, ir_node *block,
                        ir_mode *mode, ir_node *op1, ir_node *op2,
                        match_flags_t flags)
{
	memset(args, 0, sizeof(*args));

	bool use_am;
	bool use_xmm       = mode_is_float(mode);
	bool use_immediate = flags & match_immediate;
	bool mode_neutral  = flags & match_mode_neutral;

	args->attr.base.insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;

	use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	if (use_immediate
	    && match_immediate_32(&args->attr.u.immediate, op2, false, mode_neutral)) {
		assert(!use_xmm && "Can't (yet) match binop with xmm immediate");
		/* fine, we found an immediate */
		args->attr.base.base.op_mode = AMD64_OP_REG_IMM;
		args->in[args->arity++]      = be_transform_node(op1);
		args->reqs                   = reg_reqs;
	} else if (use_am) {
		ir_node *new_op        = be_transform_node(op);
		int      reg_input     = args->arity++;
		args->attr.u.reg_input = reg_input;
		args->in[reg_input]    = new_op;
		amd64_addr_t *addr     = &args->attr.base.addr;

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &(args->arity), args->in, addr);

		/* pick requirements matching the number of address inputs */
		args->reqs = use_xmm ? xmm_mem_reqs : reg_mem_reqs;
		if (addr->base_input != NO_INPUT && addr->index_input != NO_INPUT) {
			args->reqs = use_xmm ? xmm_reg_reg_mem_reqs
			             : reg_reg_reg_mem_reqs;
		} else if (addr->base_input != NO_INPUT || addr->index_input != NO_INPUT) {
			args->reqs = use_xmm ? xmm_reg_mem_reqs
			             : reg_reg_mem_reqs;
		}
		ir_node *new_mem    = be_transform_node(get_Load_mem(load));
		int mem_input       = args->arity++;
		args->in[mem_input] = new_mem;
		addr->mem_input     = mem_input;

		args->mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		args->attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		/* simply transform the arguments */
		args->in[args->arity++] = be_transform_node(op1);
		args->in[args->arity++] = be_transform_node(op2);
		args->attr.base.base.op_mode = AMD64_OP_REG_REG;

		args->reqs = use_xmm ? xmm_xmm_reqs : reg_reg_reqs;
	}
}

static ir_node *gen_binop_am(ir_node *node, ir_node *op1, ir_node *op2,
813
814
                             construct_binop_func func, unsigned pn_res,
                             match_flags_t flags)
Matthias Braun's avatar
Matthias Braun committed
815
816
817
818
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(node);
	amd64_args_t args;
Matthias Braun's avatar
Matthias Braun committed
819
	match_binop(&args, block, mode, op1, op2, flags);
Matthias Braun's avatar
Matthias Braun committed
820
821
822
823

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);

Matthias Braun's avatar
Matthias Braun committed
824
	ir_node *new_node = func(dbgi, new_block, args.arity, args.in, &args.attr);
Matthias Braun's avatar
Matthias Braun committed
825
	arch_set_irn_register_reqs_in(new_node, args.reqs);
826

Tobias Rapp's avatar
Tobias Rapp committed
827
	fix_node_mem_proj(new_node, args.mem_proj);
Matthias Braun's avatar
Matthias Braun committed
828

829
830
831
	if (mode_is_float(mode)) {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_xmm_same_0);
832
		return new_r_Proj(new_node, amd64_mode_xmm, pn_res);
833
834
835
	} else {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_gp_same_0);
836
		return new_r_Proj(new_node, mode_gp, pn_res);
837
	}
Matthias Braun's avatar
Matthias Braun committed
838
839
}

840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
/**
 * Transform a binary operation that implicitly uses RAX (e.g. div/mul style
 * instructions), optionally folding one operand as an address-mode load.
 *
 * @param node       the Firm node being transformed
 * @param op1        left operand
 * @param op2        right operand
 * @param make_node  constructor for the RAX-style backend node
 * @param flags      allowed matching variants (immediates are not supported)
 * @return the new backend node (result Projs are created by the caller)
 */
static ir_node *gen_binop_rax(ir_node *node, ir_node *op1, ir_node *op2,
                              construct_rax_binop_func make_node,
                              match_flags_t flags)
{
	bool const mode_neutral = flags & match_mode_neutral;
	assert(!(flags & match_immediate));

	ir_mode *const mode = get_irn_mode(op1);
	amd64_insn_mode_t const insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *const block = get_nodes_block(node);
	ir_node *load;
	ir_node *op;
	bool const use_am
		= use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *in[4];
	int      arity = 0;
	const arch_register_req_t **reqs;
	amd64_op_mode_t op_mode;
	amd64_addr_t    addr;
	memset(&addr, 0, sizeof(addr));

	ir_node *mem_proj = NULL;
	if (use_am) {
		in[arity++] = be_transform_node(op);

		ir_node *const ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &arity, in, &addr);

		/* pick requirements matching the number of address registers used */
		bool const has_base  = addr.base_input  != NO_INPUT;
		bool const has_index = addr.index_input != NO_INPUT;
		if (has_base && has_index)
			reqs = reg_reg_reg_mem_reqs;
		else if (has_base || has_index)
			reqs = reg_reg_mem_reqs;
		else
			reqs = reg_mem_reqs;

		ir_node *const new_mem = be_transform_node(get_Load_mem(load));
		int const mem_input    = arity++;
		in[mem_input]          = new_mem;
		addr.mem_input         = mem_input;

		mem_proj = get_Proj_for_pn(load, pn_Load_M);
		op_mode  = AMD64_OP_RAX_ADDR;
	} else {
		/* simply transform the arguments */
		in[arity++] = be_transform_node(op1);
		in[arity++] = be_transform_node(op2);
		reqs        = rax_reg_reqs;
		op_mode     = AMD64_OP_RAX_REG;
	}

	assert((size_t)arity <= ARRAY_SIZE(in));

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node  *const new_node  = make_node(dbgi, new_block, arity, in,
	                                      insn_mode, op_mode, addr);
	arch_set_irn_register_reqs_in(new_node, reqs);
	if (mem_proj != NULL)
		be_set_transformed_node(load, new_node);
	return new_node;
}

915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
/**
 * Transform a two-operand SSE (xmm) operation, folding one operand as an
 * address-mode load when possible.
 *
 * @param node       the Firm node being transformed
 * @param op0        first operand
 * @param op1        second operand
 * @param make_node  constructor for the backend node
 * @param flags      allowed matching variants
 * @return a Proj of the xmm result of the new backend node
 */
static ir_node *gen_binop_xmm(ir_node *node, ir_node *op0, ir_node *op1,
                              construct_binop_func make_node,
                              match_flags_t flags)
{
	ir_node *const block = get_nodes_block(node);
	ir_mode *const mode  = get_irn_mode(op0);

	amd64_args_t args;
	memset(&args, 0, sizeof(args));

	ir_node *load;
	ir_node *op;
	bool const use_am
		= use_address_matching(mode, flags, block, op0, op1, &load, &op);

	if (use_am) {
		int const reg_input   = args.arity++;
		args.attr.u.reg_input = reg_input;
		args.in[reg_input]    = be_transform_node(op);

		amd64_addr_t *const addr = &args.attr.base.addr;
		ir_node      *const ptr  = get_Load_ptr(load);
		perform_address_matching(ptr, &args.arity, args.in, addr);

		/* requirements depend on how many address registers are in use */
		unsigned const reg_count
			= val_input(addr->base_input) + val_input(addr->index_input);
		switch (reg_count) {
		case 0:  args.reqs = xmm_mem_reqs;         break;
		case 1:  args.reqs = xmm_reg_mem_reqs;     break;
		default: args.reqs = xmm_reg_reg_mem_reqs; break;
		}

		ir_node *const new_mem = be_transform_node(get_Load_mem(load));
		int const mem_input    = args.arity++;
		args.in[mem_input]     = new_mem;
		addr->mem_input        = mem_input;

		args.mem_proj = get_Proj_for_pn(load, pn_Load_M);
		args.attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		args.in[args.arity++] = be_transform_node(op0);
		args.in[args.arity++] = be_transform_node(op1);
		args.attr.base.base.op_mode = AMD64_OP_REG_REG;
		args.reqs = xmm_xmm_reqs;
	}

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node  *const new_node  = make_node(dbgi, new_block, args.arity, args.in,
	                                      &args.attr);
	arch_set_irn_register_reqs_in(new_node, args.reqs);

	fix_node_mem_proj(new_node, args.mem_proj);

	arch_set_irn_register_req_out(new_node, 0,
	                              &amd64_requirement_xmm_same_0);
	/* NOTE(review): the result Proj number is hard-coded to
	 * pn_amd64_subs_res; presumably all xmm binops produced here share the
	 * same result pn — confirm against the node spec */
	return new_r_Proj(new_node, amd64_mode_xmm, pn_amd64_subs_res);
}

Matthias Braun's avatar
Matthias Braun committed
971
typedef ir_node *(*construct_shift_func)(dbg_info *dbgi, ir_node *block,
Matthias Braun's avatar
Matthias Braun committed
972
	int arity, ir_node *in[], const amd64_shift_attr_t *attr_init);
Matthias Braun's avatar
Matthias Braun committed
973
974

static ir_node *gen_shift_binop(ir_node *node, ir_node *op1, ir_node *op2,
975
976
                                construct_shift_func func, unsigned pn_res,
                                match_flags_t flags)
Matthias Braun's avatar
Matthias Braun committed
977
978
979
980
981
982
983
984
985
986
987
988
{
	ir_mode *mode = get_irn_mode(node);
	assert(!mode_is_float(mode));

	if (get_mode_modulo_shift(mode) != 32 && get_mode_size_bits(mode) != 64)
		panic("insupported modulo shift used");

	ir_node *in[3];
	int      arity = 0;
	if (flags & match_mode_neutral) {
		op1 = skip_downconv(op1);
		in[arity++] = be_transform_node(op1);
Matthias Braun's avatar
Matthias Braun committed
989
		mode = get_mode_size_bits(mode) > 32 ? mode_gp : mode_Iu;
Matthias Braun's avatar
Matthias Braun committed
990
991
	} else {
		op1 = skip_sameconv(op1);
992
993
994

		/* Use 8/16bit operations instead of doing zext/upconv */
		in[arity++] = be_transform_node(op1);
Matthias Braun's avatar
Matthias Braun committed
995
996
997
998
999
1000
	}

	/* we can skip any convs for the shift count, as it only uses the lowest
	 * 5/6 bits anyway */
	while (is_Conv(op2) && get_irn_n_edges(op2) == 1) {
		ir_node *const op = get_Conv_op(op2);
1001
		if (get_mode_arithmetic(get_irn_mode(op)) != irma_twos_complement)
Matthias Braun's avatar
Matthias Braun committed
1002
1003
1004
1005
			break;
		op2 = op;
	}

Matthias Braun's avatar
Matthias Braun committed
1006
1007
	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
Matthias Braun's avatar
Matthias Braun committed
1008
1009
1010
	const arch_register_req_t **reqs;
	const arch_register_req_t  *out_req0;
	if (is_Const(op2)) {
Matthias Braun's avatar
Matthias Braun committed
1011
1012
1013
		attr.base.op_mode = AMD64_OP_SHIFT_IMM;
		reqs              = reg_reqs;
		out_req0          = &amd64_requirement_gp_same_0;
1014
		attr.immediate    = get_Const_long(op2);
Matthias Braun's avatar
Matthias Braun committed
1015
	} else {
Matthias Braun's avatar
Matthias Braun committed
1016
1017
1018
1019
		attr.base.op_mode = AMD64_OP_SHIFT_REG;
		in[arity++]       = be_transform_node(op2);
		reqs              = reg_rcx_reqs;
		out_req0          = &amd64_requirement_gp_same_0_not_1;
Matthias Braun's avatar
Matthias Braun committed
1020
	}
Matthias Braun's avatar
Matthias Braun committed
1021
	attr.insn_mode = get_insn_mode_from_mode(mode);
Matthias Braun's avatar
Matthias Braun committed
1022

Matthias Braun's avatar
Matthias Braun committed
1023
	dbg_info *const dbgi      = get_irn_dbg_info(node);
1024
	ir_node  *const new_block = be_transform_nodes_block(node);
Matthias Braun's avatar
Matthias Braun committed
1025
	ir_node  *const new_node  = func(dbgi, new_block, arity, in, &attr);
Matthias Braun's avatar
Matthias Braun committed
1026
1027
	arch_set_irn_register_reqs_in(new_node, reqs);
	arch_set_irn_register_req_out(new_node, 0, out_req0);
1028
	return new_r_Proj(new_node, mode_gp, pn_res);
1029
1030
}

1031
/**
 * Build an integer addition as an amd64 lea node (lea does not clobber
 * flags and allows a free 3-operand form).
 *
 * @param node  the Firm Add node (source of mode/debug info)
 * @param op1   left operand
 * @param op2   right operand (may be matched as a 32bit immediate)
 * @return the new lea node
 */
static ir_node *create_lea_as_add(ir_node *node, ir_node *op1, ir_node *op2)
{
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_nodes_block(node);
	ir_mode  *const mode      = get_irn_mode(node);

	amd64_insn_mode_t const insn_mode
		= get_mode_size_bits(mode) <= 32 ? INSN_MODE_32 : INSN_MODE_64;

	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	ir_node *in[2];
	int      arity = 0;
	const arch_register_req_t **reqs;

	if (match_immediate_32(&addr.immediate, op2, false, true)) {
		/* base + immediate displacement */
		in[arity++]      = be_transform_node(op1);
		reqs             = reg_reqs;
		addr.index_input = NO_INPUT;
	} else {
		/* base + index */
		in[arity++]      = be_transform_node(op1);
		in[arity++]      = be_transform_node(op2);
		addr.base_input  = 0;
		addr.index_input = 1;
		reqs             = reg_reg_reqs;
	}

	ir_node *const res
		= new_bd_amd64_lea(dbgi, new_block, arity, in, insn_mode, addr);
	arch_set_irn_register_reqs_in(res, reqs);
	return res;
}

Matthias Braun's avatar
Matthias Braun committed
1067
1068
/**
 * Transform an Add node.
 *
 * Float adds become adds instructions; integer adds use the regular binop
 * path when an address-mode operand can be folded, otherwise a lea (which
 * keeps the flags intact and permits 3-operand form).
 */
static ir_node *gen_Add(ir_node *const node)
{
	ir_node *const op1  = get_Add_left(node);
	ir_node *const op2  = get_Add_right(node);
	ir_mode *const mode = get_irn_mode(node);

	if (mode_is_float(mode)) {
		return gen_binop_am(node, op1, op2, new_bd_amd64_adds,
							pn_amd64_adds_res, match_commutative | match_am);
	}

	match_flags_t const flags = match_immediate | match_am
	                          | match_mode_neutral | match_commutative;

	ir_node *const block = get_nodes_block(node);
	ir_node *load;
	ir_node *op;
	bool const use_am
		= use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *const res = use_am
		? gen_binop_am(node, op1, op2, new_bd_amd64_add, pn_amd64_add_res,
		               flags)
		: create_lea_as_add(node, op1, op2);

	x86_mark_non_am(node);
	return res;
}
1096

Matthias Braun's avatar
Matthias Braun committed
1097
/**
 * Transform a Sub node.
 *
 * Float subtractions may fold an address-mode operand; integer
 * subtractions only match immediates for now.
 */
static ir_node *gen_Sub(ir_node *const node)
{
	ir_node *const minuend    = get_Sub_left(node);
	ir_node *const subtrahend = get_Sub_right(node);
	ir_mode *const mode       = get_irn_mode(node);

	if (mode_is_float(mode))
		return gen_binop_am(node, minuend, subtrahend, new_bd_amd64_subs,
		                    pn_amd64_subs_res, match_am);

	/* TODO: do not match AM yet until we have a sub->neg+add rule
	 * in amd64_finish */
	return gen_binop_am(node, minuend, subtrahend, new_bd_amd64_sub,
	                    pn_amd64_sub_res, match_immediate);
}

static ir_node *gen_And(ir_node *const node)
{
	ir_node *op1 = get_And_left(node);
	ir_node *op2 = get_And_right(node);
1118
	return gen_binop_am(node, op1, op2, new_bd_amd64_and, pn_amd64_and_res,
Matthias Braun's avatar
Matthias Braun committed
1119
1120
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
Robin Redeker's avatar