/*
 * This file is part of libFirm.
 * Copyright (C) 2012 University of Karlsruhe.
 */

/**
 * @file
 * @brief   code selection (transform FIRM into amd64 FIRM)
 */
#include "debug.h"
#include "panic.h"
#include "heights.h"
#include "ircons.h"
#include "irgmod.h"
#include "irgraph_t.h"
#include "irmode_t.h"
#include "irnode_t.h"
#include "iropt_t.h"
#include "tv_t.h"
#include "util.h"

#include "benode.h"
#include "betranshlp.h"
#include "bearch_amd64_t.h"
#include "beirg.h"
#include "besched.h"

#include "amd64_new_nodes.h"
#include "amd64_nodes_attr.h"
#include "amd64_transform.h"
#include "../ia32/x86_address_mode.h"
#include "../ia32/x86_cconv.h"

#include "gen_amd64_regalloc_if.h"

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

static ir_mode         *mode_gp;
static ir_mode         *mode_flags;
static x86_cconv_t     *current_cconv = NULL;
static be_start_info_t  start_mem;
static be_start_info_t  start_val[N_AMD64_REGISTERS];
static size_t           start_params_offset;
static pmap            *node_to_stack;
static be_stackorder_t *stackorder;

/** we don't have a concept of aliasing registers, so enumerate them
 * manually for the asm nodes. */
const x86_clobber_name_t amd64_additional_clobber_names[] = {
	{ "al", REG_RAX }, { "ah", REG_RAX }, { "ax", REG_RAX }, { "eax", REG_RAX },
	{ "bl", REG_RBX }, { "bh", REG_RBX }, { "bx", REG_RBX }, { "ebx", REG_RBX },
	{ "cl", REG_RCX }, { "ch", REG_RCX }, { "cx", REG_RCX }, { "ecx", REG_RCX },
	{ "dl", REG_RDX }, { "dh", REG_RDX }, { "dx", REG_RDX }, { "edx", REG_RDX },
	{ "sil",  REG_RSI }, { "si",   REG_RSI }, { "esi",  REG_RSI },
	{ "dil",  REG_RDI }, { "di",   REG_RDI }, { "edi",  REG_RDI },
	{ "bpl",  REG_RBP }, { "bp",   REG_RBP }, { "ebp",  REG_RBP },
	{ "spl",  REG_RSP }, { "sp",   REG_RSP }, { "esp",  REG_RSP },
	{ "r8b",  REG_R8  }, { "r8w",  REG_R8  }, { "r8d",  REG_R8  },
	{ "r9b",  REG_R9  }, { "r9w",  REG_R9  }, { "r9d",  REG_R9  },
	{ "r10b", REG_R10 }, { "r10w", REG_R10 }, { "r10d", REG_R10 },
	{ "r11b", REG_R11 }, { "r11w", REG_R11 }, { "r11d", REG_R11 },
	{ "r12b", REG_R12 }, { "r12w", REG_R12 }, { "r12d", REG_R12 },
	{ "r13b", REG_R13 }, { "r13w", REG_R13 }, { "r13d", REG_R13 },
	{ "r14b", REG_R14 }, { "r14w", REG_R14 }, { "r14d", REG_R14 },
	{ "r15b", REG_R15 }, { "r15w", REG_R15 }, { "r15d", REG_R15 },
	{ NULL, ~0u }
};

#define GP &amd64_reg_classes[CLASS_amd64_gp]
const x86_asm_constraint_list_t amd64_asm_constraints = {
	['A'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RDX },
	['D'] = { MATCH_REG, GP, 1 << REG_GP_RDI },
	['I'] = { MATCH_IMM, GP, 0 },
	['J'] = { MATCH_IMM, GP, 0 },
	['K'] = { MATCH_IMM, GP, 0 },
	['L'] = { MATCH_IMM, GP, 0 },
	['M'] = { MATCH_IMM, GP, 0 },
	['N'] = { MATCH_IMM, GP, 0 },
	['O'] = { MATCH_IMM, GP, 0 },
	['R'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP | 1 << REG_GP_RSP },
	['S'] = { MATCH_REG, GP, 1 << REG_GP_RSI },
	['Q'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX },
	['V'] = { MATCH_MEM, GP, 0 },
	['X'] = { MATCH_ANY, GP, 0 },
	['a'] = { MATCH_REG, GP, 1 << REG_GP_RAX },
	['b'] = { MATCH_REG, GP, 1 << REG_GP_RBX },
	['c'] = { MATCH_REG, GP, 1 << REG_GP_RCX },
	['d'] = { MATCH_REG, GP, 1 << REG_GP_RDX },
	['g'] = { MATCH_ANY, GP, 0 },
	['i'] = { MATCH_IMM, GP, 0 },
	['l'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP },
	['m'] = { MATCH_MEM, GP, 0 },
	['n'] = { MATCH_IMM, GP, 0 },
	['o'] = { MATCH_MEM, GP, 0 },
	['p'] = { MATCH_REG, GP, 0 },
	['q'] = { MATCH_REG, GP, 0 },
	['r'] = { MATCH_REG, GP, 0 },
	['x'] = { MATCH_REG, &amd64_reg_classes[CLASS_amd64_xmm], 0 },

	// see comments in ia32_transform.c about unimplemented stuff.
};
#undef GP

static const arch_register_req_t amd64_requirement_gp = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_flags = {
	.cls             = &amd64_reg_classes[CLASS_amd64_flags],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_xmm = {
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

#define BIT(x)    (1u << x)

static const arch_register_req_t amd64_requirement_gp_same_0 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same,
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_xmm_same_0 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same,
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_gp_same_0_not_1 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same
	                   | arch_register_req_type_must_be_different,
	.other_same      = BIT(0),
	.other_different = BIT(1),
	.width           = 1,
};

static const unsigned amd64_limited_gp_rcx [] = { BIT(REG_GP_RCX) };
static const arch_register_req_t amd64_requirement_rcx = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rcx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rax [] = { BIT(REG_GP_RAX) };
static const arch_register_req_t amd64_requirement_rax = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rax,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rdx [] = { BIT(REG_GP_RDX) };
static const arch_register_req_t amd64_requirement_rdx = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rdx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rsp [] = { BIT(REG_GP_RSP) };
static const arch_register_req_t amd64_requirement_rsp = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rsp,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t *mem_reqs[] = {
	&arch_no_requirement,
};

static const arch_register_req_t *reg_mem_reqs[] = {
	&amd64_requirement_gp,
Matthias Braun's avatar
	&arch_no_requirement,
};

static const arch_register_req_t *rsp_mem_reqs[] = {
	&amd64_requirement_rsp,
	&arch_no_requirement,
};

static const arch_register_req_t *rsp_reg_mem_reqs[] = {
	&amd64_requirement_rsp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_mem_reqs[] = {
	&amd64_requirement_xmm,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_reg_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_flags_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_flags,
};

static const arch_register_req_t *reg_reg_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
};

static const arch_register_req_t *rax_reg_reqs[] = {
	&amd64_requirement_rax,
	&amd64_requirement_gp,
};

static const arch_register_req_t *rax_reg_rdx_mem_reqs[] = {
	&amd64_requirement_rax,
	&amd64_requirement_gp,
	&amd64_requirement_rdx,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reqs[] = {
	&amd64_requirement_gp,
};

arch_register_req_t const *amd64_xmm_reqs[] = {
	&amd64_requirement_xmm,
};

static const arch_register_req_t *reg_rcx_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_rcx,
};

static const arch_register_req_t *no_reqs[] = {
};

static const arch_register_req_t *xmm_xmm_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_xmm,
};

arch_register_req_t const **const gp_am_reqs[] = {
	mem_reqs,
	reg_mem_reqs,
	reg_reg_mem_reqs,
	reg_reg_reg_mem_reqs,
};

static arch_register_req_t const **const xmm_am_reqs[] = {
	mem_reqs,
	xmm_mem_reqs,
	xmm_reg_mem_reqs,
	xmm_reg_reg_mem_reqs,
};

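/** Returns true if values of the given mode are kept in general purpose
 * registers: two's complement arithmetic, excluding the 128-bit vector mode. */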
static inline bool mode_needs_gp_reg(ir_mode *mode)
{
	return get_mode_arithmetic(mode) == irma_twos_complement
	    && mode != amd64_mode_xmm; /* mode_xmm is 128bit int at the moment */
}

static bool is_downconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;

	ir_mode *dest_mode = get_irn_mode(node);
	if (!mode_needs_gp_reg(dest_mode))
		return false;
	ir_mode *src_mode = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(src_mode))
		return false;
	return get_mode_size_bits(dest_mode) <= get_mode_size_bits(src_mode);
}

static ir_node *skip_downconv(ir_node *node)
{
	while (is_downconv(node)) {
		if (get_irn_n_edges(node) > 1)
			break;
		node = get_Conv_op(node);
	}
	return node;
}

static bool is_sameconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;
	ir_mode *dest_mode = get_irn_mode(node);
	if (!mode_needs_gp_reg(dest_mode))
		return false;
	ir_mode *src_mode = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(src_mode))
		return false;
	return get_mode_size_bits(dest_mode) == get_mode_size_bits(src_mode);
}

static ir_node *skip_sameconv(ir_node *node)
{
	while (is_sameconv(node)) {
		if (get_irn_n_edges(node) > 1)
			break;
		node = get_Conv_op(node);
	}
	return node;
}

static ir_node *get_initial_sp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RSP]);
}

static ir_node *get_initial_fp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RBP]);
}

static ir_node *get_initial_mem(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_mem);
}

static ir_node *get_frame_base(ir_graph *irg)
{
	if (current_cconv->omit_fp) {
		return get_initial_sp(irg);
	} else {
		return get_initial_fp(irg);
	}
}

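/** Maps the bit size of @p mode to the corresponding amd64 operand size. */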
static amd64_insn_mode_t get_insn_mode_from_mode(const ir_mode *mode)
{
	switch (get_mode_size_bits(mode)) {
	case   8: return INSN_MODE_8;
	case  16: return INSN_MODE_16;
	case  32: return INSN_MODE_32;
	case  64: return INSN_MODE_64;
	case 128: return INSN_MODE_128;
	}
	panic("unexpected mode");
}

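/** Returns a read-only constant entity holding @p tv, creating and caching it
 * on first use; float constants are materialized by loading from it. */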
ir_entity *create_float_const_entity(ir_tarval *const tv)
{
	ir_entity *entity = pmap_get(ir_entity, amd64_constants, tv);
	if (entity != NULL)
		return entity;

	ir_mode *mode = get_tarval_mode(tv);
	ir_type *type = get_type_for_mode(mode);
	ir_type *glob = get_glob_type();

	entity = new_entity(glob, id_unique("C%u"), type);
	set_entity_visibility(entity, ir_visibility_private);
	add_entity_linkage(entity, IR_LINKAGE_CONSTANT);

	ir_initializer_t *initializer = create_initializer_tarval(tv);
	set_entity_initializer(entity, initializer);

	pmap_insert(amd64_constants, tv, entity);
	return entity;
}

typedef enum reference_mode_t {
	REFERENCE_DIRECT,
	REFERENCE_IP_RELATIVE,
	REFERENCE_GOT,
} reference_mode_t;

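/** Determines how @p entity has to be referenced under the current PIC
 * settings: directly, rip-relative, or through the GOT. */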
static reference_mode_t need_relative_addressing(const ir_entity *entity)
{
	if (!be_options.pic)
		return REFERENCE_DIRECT;

	/* basically everything is instruction-pointer relative; external
	 * functions go through the global offset table */
	return entity_has_definition(entity)
	   && (get_entity_linkage(entity) & IR_LINKAGE_MERGE) == 0
	    ? REFERENCE_IP_RELATIVE : REFERENCE_GOT;
}

static ir_node *create_float_const(dbg_info *dbgi, ir_node *block,
                                   ir_tarval *tv)
{
	ir_graph  *irg     = get_irn_irg(block);
	ir_mode   *tv_mode = get_tarval_mode(tv);
	ir_entity *entity  = create_float_const_entity(tv);
	ir_node   *nomem   = get_irg_no_mem(irg);

	ir_node *in[] = { nomem };
	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	addr.immediate.entity       = entity;
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(tv_mode);

	addr.index_input = NO_INPUT;
	if (need_relative_addressing(entity) == REFERENCE_DIRECT) {
		addr.base_input = NO_INPUT;
	} else {
		assert(need_relative_addressing(entity) == REFERENCE_IP_RELATIVE);
		addr.base_input = RIP_INPUT;
	}

	ir_node *load;
	unsigned pn_res;
	if (insn_mode == INSN_MODE_128) {
		load = new_bd_amd64_movdqa(dbgi, block, ARRAY_SIZE(in), in,
		                           AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movdqa_res;
	} else {
		load = new_bd_amd64_movs_xmm(dbgi, block, ARRAY_SIZE(in), in,
		                             insn_mode, AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movs_xmm_res;
	}
	arch_set_irn_register_reqs_in(load, mem_reqs);
	set_irn_pinned(load, op_pin_state_floats);

	return new_r_Proj(load, amd64_mode_xmm, pn_res);
}

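/** Builds a tarval of @p mode with only the sign bit set (presumably used to
 * implement float neg/abs with bitwise operations). */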
ir_tarval *create_sign_tv(ir_mode *mode)
{
	unsigned size = get_mode_size_bits(mode);
	assert(size == 32 || size == 64 || size == 128);
	ir_mode *intmode = size == 128 ? amd64_mode_xmm
	                 : size == 64  ? mode_Lu
	                               : mode_Iu;
	ir_tarval *one  = get_mode_one(intmode);
	ir_tarval *sign = tarval_shl_unsigned(one, size-1);
	return tarval_bitcast(sign, mode);
}

static ir_node *gen_Const(ir_node *node)
{
	ir_node  *block = be_transform_nodes_block(node);
	dbg_info *dbgi  = get_irn_dbg_info(node);
	ir_mode  *mode  = get_irn_mode(node);
	ir_tarval *tv = get_Const_tarval(node);

	if (!mode_needs_gp_reg(mode)) {
		if (tarval_is_null(tv)) {
			return new_bd_amd64_xorpd_0(dbgi, block);
		}

		return create_float_const(dbgi, block, tv);
	}

	uint64_t val = get_tarval_uint64(tv);
	amd64_insn_mode_t imode = val > UINT32_MAX ? INSN_MODE_64 : INSN_MODE_32;
	return new_bd_amd64_mov_imm(dbgi, block, imode, val, NULL);
}

static ir_node *gen_Address(ir_node *node)
{
	ir_node   *block  = be_transform_nodes_block(node);
	dbg_info  *dbgi   = get_irn_dbg_info(node);
	ir_entity *entity = get_Address_entity(node);

	/* do we need RIP-relative addressing because of PIC? */
	reference_mode_t mode = need_relative_addressing(entity);
	if (mode == REFERENCE_DIRECT)
		return new_bd_amd64_mov_imm(dbgi, block, INSN_MODE_64, 0, entity);

	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));
	addr.base_input  = RIP_INPUT;
	addr.index_input = NO_INPUT;
	addr.mem_input   = NO_INPUT;

	if (mode == REFERENCE_IP_RELATIVE) {
		addr.immediate.entity = entity;
		return new_bd_amd64_lea(dbgi, block, 0, NULL, INSN_MODE_64, addr);
	} else {
		assert(mode == REFERENCE_GOT);
		addr.immediate.entity = new_got_entry_entity(entity);
		ir_node *load = new_bd_amd64_mov_gp(dbgi, block, 0, NULL, INSN_MODE_64,
		                                    AMD64_OP_ADDR, addr);
		return new_r_Proj(load, mode_gp, pn_amd64_mov_gp_res);
	}
}

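/** Creates an IncSP node on RSP and marks it as modifying the flags. */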
ir_node *amd64_new_IncSP(ir_node *block, ir_node *old_sp, int offset,
                         unsigned align)
{
	ir_node *incsp = be_new_IncSP(&amd64_registers[REG_RSP], block, old_sp,
	                              offset, align);
	arch_add_irn_flags(incsp, arch_irn_flag_modify_flags);
	return incsp;
}

typedef ir_node *(*construct_binop_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], const amd64_binop_addr_attr_t *attr_init);

typedef ir_node *(*construct_rax_binop_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], amd64_insn_mode_t insn_mode,
	amd64_op_mode_t op_mode, amd64_addr_t addr);

typedef enum match_flags_t {
	match_am           = 1 << 0,
	match_mode_neutral = 1 << 1,
	match_immediate    = 1 << 2,
	match_commutative  = 1 << 3,
} match_flags_t;

typedef struct amd64_args_t {
	amd64_binop_addr_attr_t     attr;
	ir_node                    *mem_proj;
	ir_node                    *in[4];
	int                         arity;
	const arch_register_req_t **reqs;
} amd64_args_t;

static bool match_immediate_32(amd64_imm32_t *imm, const ir_node *op,
                               bool can_match_ip_relative,
                               bool upper32_dont_care)
{
	assert(mode_needs_gp_reg(get_irn_mode(op)));
	assert(imm->offset == 0 && imm->entity == NULL);
	if (is_Const(op)) {
		ir_tarval *tv = get_Const_tarval(op);
		if (!tarval_is_long(tv))
			return false;
		long    lval = get_tarval_long(tv);
		int32_t val  = (int32_t)lval;
		if ((long)val != lval)
			return false;
		/* the immediate value is sign-extended to 64 bit; sometimes
		 * this is not what we want. */
		if (!upper32_dont_care && val < 0
		    && !mode_is_signed(get_tarval_mode(tv)))
		    return false;
		imm->offset = val;
		return true;
	} else if (can_match_ip_relative && is_Address(op)) {
		/* TODO: check if entity is in lower 4GB address space/relative */
		ir_entity *entity = get_Address_entity(op);
		imm->entity = entity;
		return true;
	}
	/* TODO: SymConst, Add(SymConst, Const) ... */
	return false;
}

static ir_heights_t *heights;

static bool input_depends_on_load(ir_node *load, ir_node *input)
{
	ir_node *block = get_nodes_block(load);
	/* if the dependency is in another block, then we ignore it for now
	   as we only match address mode loads in the same block. */
	return get_nodes_block(input) == block
	    && heights_reachable_in_block(heights, input, load);
}

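/** If a Load was folded into @p node, registers @p node as the transformed
 * result of that Load so its memory Proj gets rerouted. */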
static void fix_node_mem_proj(ir_node *node, ir_node *mem_proj)
{
	if (mem_proj == NULL)
		return;

	ir_node *load = get_Proj_pred(mem_proj);
	be_set_transformed_node(load, node);
}

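/** Checks whether @p node is the result Proj of a Load in @p block that can
 * be folded as an address mode operand; returns the Load or NULL. */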
static ir_node *source_am_possible(ir_node *block, ir_node *node)
{
	if (!is_Proj(node))
		return NULL;
	ir_node *load = get_Proj_pred(node);
	if (!is_Load(load))
		return NULL;
	assert(get_Proj_num(node) == pn_Load_res);
	if (get_nodes_block(load) != block)
		return NULL;
	/* make sure we are the only user */
	if (get_irn_n_edges(node) != 1)
		return NULL;
	/* ia32 backend claims this can happen, use an assert for now and see
	 * if we hit it :) */
	assert(!be_is_transformed(node));
	return load;
}

static bool needs_extension(ir_node *op)
{
	ir_mode *mode = get_irn_mode(op);
	if (get_mode_size_bits(mode) >= 32)
		return false;
	return !be_upper_bits_clean(op, mode);
}

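/** Produces a value consisting only of the sign bit of @p node by shifting
 * arithmetically right by (size - 1) bits. */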
static ir_node *create_sext(ir_node *new_block, ir_node *const node, ir_mode *mode)
{
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(mode);
	dbg_info *const   dbgi      = get_irn_dbg_info(node);
	ir_node  *const   new_node  = be_transform_node(node);

	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	attr.base.op_mode = AMD64_OP_SHIFT_IMM;
	attr.insn_mode    = insn_mode;
	attr.immediate    = get_mode_size_bits(mode) - 1;
	ir_node *in[1]    = { new_node };
	ir_node *sar      = new_bd_amd64_sar(dbgi, new_block, ARRAY_SIZE(in),
	                                     in, &attr);

	arch_set_irn_register_reqs_in(sar, reg_reqs);
	arch_set_irn_register_req_out(sar, 0, &amd64_requirement_gp_same_0);
	return new_r_Proj(sar, mode_gp, pn_amd64_sar_res);
}

static ir_node *create_zext(ir_node *new_block, ir_node *const node)
{
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const xor0      = new_bd_amd64_xor_0(dbgi, new_block);
	arch_set_irn_register_reqs_in(xor0, reg_reqs);
	return new_r_Proj(xor0, mode_gp, pn_amd64_xor_0_res);
}

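/** Checks whether one of the operands can be folded as a memory operand
 * (address mode). On success *out_load receives the Load to fold and
 * *out_op the remaining register operand. */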
static bool use_address_matching(ir_mode *mode, match_flags_t flags,
                                 ir_node *block,
                                 ir_node *op1, ir_node *op2,
                                 ir_node **out_load, ir_node **out_op)
{
	if (! (flags & match_am))
		return false;

	unsigned mode_bits = get_mode_size_bits(mode);
	if (mode_bits == 8 || mode_bits == 16)
		return false;

	ir_node *load2 = source_am_possible(block, op2);
	if (load2 != NULL && !input_depends_on_load(load2, op1)) {
		(*out_load) = load2;
		(*out_op)   = op1;
		return true;
	}

	if (flags & match_commutative) {
		ir_node *load1 = source_am_possible(block, op1);
		if (load1 != NULL && !input_depends_on_load(load1, op2)) {
			(*out_load) = load1;
			(*out_op)   = op2;
			return true;
		}
	}
	return false;
}

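/** Decomposes @p ptr with the x86 address mode builder and fills base input,
 * index input and immediate of @p addr, appending the transformed operands
 * to @p in. */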
static void perform_address_matching(ir_node *ptr, int *arity,
                                     ir_node **in, amd64_addr_t *addr)
{
	x86_address_t maddr;
	memset(&maddr, 0, sizeof(maddr));
	x86_create_address_mode(&maddr, ptr, x86_create_am_normal);

	if (maddr.base != NULL) {
		int base_input   = (*arity)++;
		addr->base_input = base_input;
		in[base_input]   = be_transform_node(maddr.base);
	} else {
		ir_entity *entity = maddr.entity;
		if (entity != NULL
		    && need_relative_addressing(entity) != REFERENCE_DIRECT) {
		    addr->base_input = RIP_INPUT;
		} else {
			addr->base_input = NO_INPUT;
		}
	}
	if (maddr.index != NULL) {
		int index_input = (*arity)++;
		addr->index_input = index_input;
		in[index_input]  = be_transform_node(maddr.index);
	} else {
		addr->index_input = NO_INPUT;
	}
	if (maddr.frame_entity != NULL) {
		assert(maddr.entity == NULL);
		addr->immediate.entity = maddr.frame_entity;
		/* not supported yet */
		assert(!is_parameter_entity(maddr.frame_entity)
		       || get_entity_parameter_number(maddr.frame_entity)
		          != IR_VA_START_PARAMETER_NUMBER);
	} else {
		addr->immediate.entity = maddr.entity;
	}
	addr->immediate.offset = maddr.offset;
	addr->log_scale        = maddr.scale;
}

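/** Prepares inputs, requirements and attributes for a binary operation,
 * matching immediate and address mode variants according to @p flags. */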
static void match_binop(amd64_args_t *args, ir_node *block,
                        ir_mode *mode, ir_node *op1, ir_node *op2,
                        match_flags_t flags)
{
	memset(args, 0, sizeof(*args));

	bool use_am;
	bool use_xmm       = mode_is_float(mode);
	bool use_immediate = flags & match_immediate;
	bool mode_neutral  = flags & match_mode_neutral;

	args->attr.base.insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;

	use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	if (use_immediate
	    && match_immediate_32(&args->attr.u.immediate, op2, false, mode_neutral)) {
		assert(!use_xmm && "Can't (yet) match binop with xmm immediate");
		/* fine, we found an immediate */
		args->attr.base.base.op_mode = AMD64_OP_REG_IMM;
		args->in[args->arity++]      = be_transform_node(op1);
		args->reqs                   = reg_reqs;
	} else if (use_am) {
		ir_node *new_op        = be_transform_node(op);
		int      reg_input     = args->arity++;
		args->attr.u.reg_input = reg_input;
		args->in[reg_input]    = new_op;
		amd64_addr_t *addr     = &args->attr.base.addr;

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &(args->arity), args->in, addr);

		args->reqs = (use_xmm ? xmm_am_reqs : gp_am_reqs)[args->arity];

		ir_node *new_mem    = be_transform_node(get_Load_mem(load));
		int mem_input       = args->arity++;
		args->in[mem_input] = new_mem;
		addr->mem_input     = mem_input;

		args->mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		args->attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		/* simply transform the arguments */
		args->in[args->arity++] = be_transform_node(op1);
		args->in[args->arity++] = be_transform_node(op2);
		args->attr.base.base.op_mode = AMD64_OP_REG_REG;

		args->reqs = use_xmm ? xmm_xmm_reqs : reg_reg_reqs;
	}
}

static ir_node *gen_binop_am(ir_node *node, ir_node *op1, ir_node *op2,
                             construct_binop_func func, unsigned pn_res,
                             match_flags_t flags)
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(node);
	amd64_args_t args;
	match_binop(&args, block, mode, op1, op2, flags);

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);

	ir_node *new_node = func(dbgi, new_block, args.arity, args.in, &args.attr);
	arch_set_irn_register_reqs_in(new_node, args.reqs);

	fix_node_mem_proj(new_node, args.mem_proj);

	if (mode_is_float(mode)) {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_xmm_same_0);
		return new_r_Proj(new_node, amd64_mode_xmm, pn_res);
	} else {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_gp_same_0);
		return new_r_Proj(new_node, mode_gp, pn_res);
	}
}

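/** Transforms a binary operation whose first input is fixed to RAX,
 * optionally folding one operand as a memory operand. */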
static ir_node *gen_binop_rax(ir_node *node, ir_node *op1, ir_node *op2,
                              construct_rax_binop_func make_node,
                              match_flags_t flags)
{
	bool use_am;
	bool mode_neutral  = flags & match_mode_neutral;
	assert(! (flags & match_immediate));

	ir_mode *mode = get_irn_mode(op1);
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;
	ir_node *block = get_nodes_block(node);
	ir_node *in[4];
	int      arity = 0;
	const arch_register_req_t **reqs;
	amd64_op_mode_t op_mode;
	amd64_addr_t    addr;
	memset(&addr, 0, sizeof(addr));

	use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *mem_proj = NULL;
	if (use_am) {
		ir_node *new_op    = be_transform_node(op);
		int      reg_input = arity++;
		in[reg_input]      = new_op;

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &arity, in, &addr);

		reqs = gp_am_reqs[arity];

		ir_node *new_mem = be_transform_node(get_Load_mem(load));
		int mem_input    = arity++;
		in[mem_input]    = new_mem;
		addr.mem_input   = mem_input;

		mem_proj                = get_Proj_for_pn(load, pn_Load_M);
		op_mode                 = AMD64_OP_RAX_ADDR;
	} else {
		/* simply transform the arguments */
		in[arity++] = be_transform_node(op1);
		in[arity++] = be_transform_node(op2);
		reqs        = rax_reg_reqs;
		op_mode     = AMD64_OP_RAX_REG;
	}

	assert((size_t)arity <= ARRAY_SIZE(in));
	dbg_info *dbgi      = get_irn_dbg_info(node);
	ir_node  *new_block = be_transform_node(block);
	ir_node  *new_node  = make_node(dbgi, new_block, arity, in, insn_mode,
	                                op_mode, addr);
	arch_set_irn_register_reqs_in(new_node, reqs);
	if (mem_proj != NULL) {
		be_set_transformed_node(load, new_node);
	}
	return new_node;
}

static ir_node *gen_binop_xmm(ir_node *node, ir_node *op0, ir_node *op1,
                              construct_binop_func make_node,
                              match_flags_t flags)
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(op0);
	amd64_args_t args;
	memset(&args, 0, sizeof(args));

	ir_node *load;
	ir_node *op;
	bool use_am = use_address_matching(mode, flags, block, op0, op1, &load,
	                                   &op);

	if (use_am) {
		int reg_input = args.arity++;
		args.attr.u.reg_input = reg_input;
		args.in[reg_input]    = be_transform_node(op);

		amd64_addr_t *addr = &args.attr.base.addr;
		ir_node      *ptr  = get_Load_ptr(load);
		perform_address_matching(ptr, &args.arity, args.in, addr);

		args.reqs = xmm_am_reqs[args.arity];

		ir_node *new_mem   = be_transform_node(get_Load_mem(load));
		int mem_input      = args.arity++;
		args.in[mem_input] = new_mem;
		addr->mem_input    = mem_input;

		args.mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		args.attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		args.in[args.arity++] = be_transform_node(op0);
		args.in[args.arity++] = be_transform_node(op1);
		args.attr.base.base.op_mode = AMD64_OP_REG_REG;
		args.reqs = xmm_xmm_reqs;
	}

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node *new_node = make_node(dbgi, new_block, args.arity, args.in,
	                              &args.attr);
	arch_set_irn_register_reqs_in(new_node, args.reqs);

	fix_node_mem_proj(new_node, args.mem_proj);

	arch_set_irn_register_req_out(new_node, 0,
	                              &amd64_requirement_xmm_same_0);
	return new_r_Proj(new_node, amd64_mode_xmm, pn_amd64_subs_res);
}

typedef ir_node *(*construct_shift_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], const amd64_shift_attr_t *attr_init);

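/** Transforms a shift; the shift count ends up either as an immediate or in
 * RCX. */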
static ir_node *gen_shift_binop(ir_node *node, ir_node *op1, ir_node *op2,
                                construct_shift_func func, unsigned pn_res,
                                match_flags_t flags)
{
	ir_mode *mode = get_irn_mode(node);
	assert(!mode_is_float(mode));

	if (get_mode_modulo_shift(mode) != 32 && get_mode_size_bits(mode) != 64)
		panic("unsupported modulo shift used");

	ir_node *in[3];
	int      arity = 0;
	if (flags & match_mode_neutral) {
		op1 = skip_downconv(op1);
		in[arity++] = be_transform_node(op1);
		mode = get_mode_size_bits(mode) > 32 ? mode_gp : mode_Iu;
	} else {
		op1 = skip_sameconv(op1);

		/* Use 8/16bit operations instead of doing zext/upconv */
		in[arity++] = be_transform_node(op1);
	}

	/* we can skip any convs for the shift count, as it only uses the lowest
	 * 5/6 bits anyway */
	while (is_Conv(op2) && get_irn_n_edges(op2) == 1) {
		ir_node *const op = get_Conv_op(op2);
		if (get_mode_arithmetic(get_irn_mode(op)) != irma_twos_complement)
			break;
		op2 = op;
	}

	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	const arch_register_req_t **reqs;
	const arch_register_req_t  *out_req0;
	if (is_Const(op2)) {
		attr.base.op_mode = AMD64_OP_SHIFT_IMM;
		reqs              = reg_reqs;
		out_req0          = &amd64_requirement_gp_same_0;
		attr.immediate    = get_Const_long(op2);
	} else {
		attr.base.op_mode = AMD64_OP_SHIFT_REG;
		in[arity++]       = be_transform_node(op2);
		reqs              = reg_rcx_reqs;
		out_req0          = &amd64_requirement_gp_same_0_not_1;
	}
	attr.insn_mode = get_insn_mode_from_mode(mode);

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_nodes_block(node);
	ir_node  *const new_node  = func(dbgi, new_block, arity, in, &attr);
	arch_set_irn_register_reqs_in(new_node, reqs);
	arch_set_irn_register_req_out(new_node, 0, out_req0);
	return new_r_Proj(new_node, mode_gp, pn_res);
}

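/** Implements an integer Add as an LEA, either reg + 32-bit immediate or
 * reg + reg. */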
static ir_node *create_lea_as_add(ir_node *node, ir_node *op1, ir_node *op2)
{
	dbg_info *const dbgi = get_irn_dbg_info(node);
	ir_node  *new_block  = be_transform_nodes_block(node);
	ir_mode *mode        = get_irn_mode(node);

	amd64_insn_mode_t insn_mode;
	if (get_mode_size_bits(mode) <= 32)
		insn_mode = INSN_MODE_32;
	else
		insn_mode = INSN_MODE_64;

	const arch_register_req_t **reqs;
	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	ir_node *in[2];
	int arity = 0;

	if (match_immediate_32(&addr.immediate, op2, false, true)) {
		in[arity++]      = be_transform_node(op1);
		reqs             = reg_reqs;
		addr.index_input = NO_INPUT;
	} else {
		in[arity++]      = be_transform_node(op1);
		in[arity++]      = be_transform_node(op2);
		addr.base_input  = 0;
		addr.index_input = 1;
		reqs             = reg_reg_reqs;
	}

	ir_node *res = new_bd_amd64_lea(dbgi, new_block, arity, in, insn_mode, addr);
	arch_set_irn_register_reqs_in(res, reqs);
	return res;
}

static ir_node *gen_Add(ir_node *const node)
{
	match_flags_t flags = match_immediate | match_am | match_mode_neutral
	                      | match_commutative;

	ir_node *op1 = get_Add_left(node);
	ir_node *op2 = get_Add_right(node);

	ir_mode *mode  = get_irn_mode(node);
	ir_node *block = get_nodes_block(node);
	ir_node *load, *op;

	if (mode_is_float(mode)) {
		return gen_binop_am(node, op1, op2, new_bd_amd64_adds,
		                    pn_amd64_adds_res, match_commutative | match_am);
	}

	bool use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *res;
	if (use_am)
		res = gen_binop_am(node, op1, op2, new_bd_amd64_add, pn_amd64_add_res,
		                   flags);
	else
		res = create_lea_as_add(node, op1, op2);

	x86_mark_non_am(node);
	return res;
}

static ir_node *gen_Sub(ir_node *const node)
{
	ir_node  *const op1     = get_Sub_left(node);
	ir_node  *const op2     = get_Sub_right(node);
	ir_mode  *const mode    = get_irn_mode(node);

	if (mode_is_float(mode)) {
		return gen_binop_am(node, op1, op2, new_bd_amd64_subs,
		                    pn_amd64_subs_res, match_am);
	} else {
		/* TODO: do not match AM yet until we have a sub->neg+add rule
		 * in amd64_finish */
		return gen_binop_am(node, op1, op2, new_bd_amd64_sub, pn_amd64_sub_res,
		                    match_immediate);
	}
}

static ir_node *gen_And(ir_node *const node)
{
	ir_node *op1 = get_And_left(node);
	ir_node *op2 = get_And_right(node);
	return gen_binop_am(node, op1, op2, new_bd_amd64_and, pn_amd64_and_res,
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
}

static ir_node *gen_Eor(ir_node *const node)
{
	ir_node *op1 = get_Eor_left(node);
	ir_node *op2 = get_Eor_right(node);
	return gen_binop_am(node, op1, op2, new_bd_amd64_xor, pn_amd64_xor_res,
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
}

static ir_node *gen_Or(ir_node *const node)
{
	ir_node *op1 = get_Or_left(node);
	ir_node *op2 = get_Or_right(node);