/*
 * This file is part of libFirm.
 * Copyright (C) 2012 University of Karlsruhe.
 */

/**
 * @file
 * @brief   code selection (transform FIRM into amd64 FIRM)
 */
#include "debug.h"
#include "panic.h"
#include "heights.h"
#include "ircons.h"
#include "irgmod.h"
#include "irgraph_t.h"
#include "irmode_t.h"
#include "irnode_t.h"
#include "iropt_t.h"
#include "tv_t.h"
#include "util.h"

#include "benode.h"
#include "betranshlp.h"
#include "bearch_amd64_t.h"
#include "beirg.h"
#include "besched.h"

#include "amd64_new_nodes.h"
#include "amd64_nodes_attr.h"
#include "amd64_transform.h"
#include "../ia32/x86_address_mode.h"
#include "../ia32/x86_cconv.h"

#include "gen_amd64_regalloc_if.h"

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

static ir_mode         *mode_gp;
static ir_mode         *mode_flags;
static x86_cconv_t     *current_cconv = NULL;
static be_start_info_t  start_mem;
static be_start_info_t  start_val[N_AMD64_REGISTERS];
static size_t           start_params_offset;
static pmap            *node_to_stack;
static be_stackorder_t *stackorder;

/** we don't have a concept of aliasing registers, so enumerate them
 * manually for the asm nodes. */
const x86_clobber_name_t amd64_additional_clobber_names[] = {
	{ "al", REG_RAX }, { "ah", REG_RAX }, { "ax", REG_RAX }, { "eax", REG_RAX },
	{ "bl", REG_RBX }, { "bh", REG_RBX }, { "bx", REG_RBX }, { "ebx", REG_RBX },
	{ "cl", REG_RCX }, { "ch", REG_RCX }, { "cx", REG_RCX }, { "ecx", REG_RCX },
	{ "dl", REG_RDX }, { "dh", REG_RDX }, { "dx", REG_RDX }, { "edx", REG_RDX },
	{ "sil",  REG_RSI }, { "si",   REG_RSI }, { "esi",  REG_RSI },
	{ "dil",  REG_RDI }, { "di",   REG_RDI }, { "edi",  REG_RDI },
	{ "bpl",  REG_RBP }, { "bp",   REG_RBP }, { "ebp",  REG_RBP },
	{ "spl",  REG_RSP }, { "sp",   REG_RSP }, { "esp",  REG_RSP },
	{ "r8b",  REG_R8  }, { "r8w",  REG_R8  }, { "r8d",  REG_R8  },
	{ "r9b",  REG_R9  }, { "r9w",  REG_R9  }, { "r9d",  REG_R9  },
	{ "r10b", REG_R10 }, { "r10w", REG_R10 }, { "r10d", REG_R10 },
	{ "r11b", REG_R11 }, { "r11w", REG_R11 }, { "r11d", REG_R11 },
	{ "r12b", REG_R12 }, { "r12w", REG_R12 }, { "r12d", REG_R12 },
	{ "r13b", REG_R13 }, { "r13w", REG_R13 }, { "r13d", REG_R13 },
	{ "r14b", REG_R14 }, { "r14w", REG_R14 }, { "r14d", REG_R14 },
	{ "r15b", REG_R15 }, { "r15w", REG_R15 }, { "r15d", REG_R15 },
	{ NULL, ~0u }
};

#define GP &amd64_reg_classes[CLASS_amd64_gp]
const x86_asm_constraint_t amd64_asm_constraints[128] = {
	['A'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RDX },
	['D'] = { MATCH_REG, GP, 1 << REG_GP_RDI },
	['I'] = { MATCH_IMM, GP, 0 },
	['J'] = { MATCH_IMM, GP, 0 },
	['K'] = { MATCH_IMM, GP, 0 },
	['L'] = { MATCH_IMM, GP, 0 },
	['M'] = { MATCH_IMM, GP, 0 },
	['N'] = { MATCH_IMM, GP, 0 },
	['O'] = { MATCH_IMM, GP, 0 },
	['R'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP | 1 << REG_GP_RSP },
	['S'] = { MATCH_REG, GP, 1 << REG_GP_RSI },
	['Q'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX },
	['V'] = { MATCH_MEM, GP, 0 },
	['X'] = { MATCH_ANY, GP, 0 },
	['a'] = { MATCH_REG, GP, 1 << REG_GP_RAX },
	['b'] = { MATCH_REG, GP, 1 << REG_GP_RBX },
	['c'] = { MATCH_REG, GP, 1 << REG_GP_RCX },
	['d'] = { MATCH_REG, GP, 1 << REG_GP_RDX },
	['g'] = { MATCH_ANY, GP, 0 },
	['i'] = { MATCH_IMM, GP, 0 },
	['l'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP },
	['m'] = { MATCH_MEM, GP, 0 },
	['n'] = { MATCH_IMM, GP, 0 },
	['o'] = { MATCH_MEM, GP, 0 },
	['p'] = { MATCH_REG, GP, 0 },
	['q'] = { MATCH_REG, GP, 0 },
	['r'] = { MATCH_REG, GP, 0 },
	['x'] = { MATCH_REG, &amd64_reg_classes[CLASS_amd64_xmm], 0 },

	// see comments in ia32_transform.c about unimplemented stuff.
};
#undef GP

static const arch_register_req_t amd64_requirement_gp = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_flags = {
	.cls             = &amd64_reg_classes[CLASS_amd64_flags],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_xmm = {
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
	.type            = arch_register_req_type_none,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

#define BIT(x)    (1u << x)

static const arch_register_req_t amd64_requirement_gp_same_0 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same,
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_xmm_same_0 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_xmm],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same,
	.other_same      = BIT(0),
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t amd64_requirement_gp_same_0_not_1 = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = NULL,
	.type            = arch_register_req_type_should_be_same
	                   | arch_register_req_type_must_be_different,
	.other_same      = BIT(0),
	.other_different = BIT(1),
	.width           = 1,
};

static const unsigned amd64_limited_gp_rcx [] = { BIT(REG_GP_RCX) };
static const arch_register_req_t amd64_requirement_rcx = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rcx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rax [] = { BIT(REG_GP_RAX) };
static const arch_register_req_t amd64_requirement_rax = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rax,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rdx [] = { BIT(REG_GP_RDX) };
static const arch_register_req_t amd64_requirement_rdx = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rdx,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const unsigned amd64_limited_gp_rsp [] = { BIT(REG_GP_RSP) };
static const arch_register_req_t amd64_requirement_rsp = {
	.cls             = &amd64_reg_classes[CLASS_amd64_gp],
	.limited         = amd64_limited_gp_rsp,
	.type            = arch_register_req_type_limited,
	.other_same      = 0,
	.other_different = 0,
	.width           = 1,
};

static const arch_register_req_t *mem_reqs[] = {
	&arch_no_requirement,
};

static const arch_register_req_t *reg_mem_reqs[] = {
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *rsp_mem_reqs[] = {
	&amd64_requirement_rsp,
	&arch_no_requirement,
};

static const arch_register_req_t *rsp_reg_mem_reqs[] = {
	&amd64_requirement_rsp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_mem_reqs[] = {
	&amd64_requirement_xmm,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reg_reg_mem_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *xmm_reg_reg_mem_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_gp,
	&amd64_requirement_gp,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_flags_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_flags,
};

static const arch_register_req_t *reg_reg_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_gp,
};

static const arch_register_req_t *rax_reg_reqs[] = {
	&amd64_requirement_rax,
	&amd64_requirement_gp,
};

static const arch_register_req_t *rax_reg_rdx_mem_reqs[] = {
	&amd64_requirement_rax,
	&amd64_requirement_gp,
	&amd64_requirement_rdx,
	&arch_no_requirement,
};

static const arch_register_req_t *reg_reqs[] = {
	&amd64_requirement_gp,
};

static const arch_register_req_t *xmm_reqs[] = {
	&amd64_requirement_xmm,
};

static const arch_register_req_t *reg_rcx_reqs[] = {
	&amd64_requirement_gp,
	&amd64_requirement_rcx,
};

static const arch_register_req_t *no_reqs[] = {
};

static const arch_register_req_t *xmm_xmm_reqs[] = {
	&amd64_requirement_xmm,
	&amd64_requirement_xmm,
};

static inline bool mode_needs_gp_reg(ir_mode *mode)
{
	return get_mode_arithmetic(mode) == irma_twos_complement
	    && mode != amd64_mode_xmm; /* mode_xmm is 128bit int at the moment */
}

static bool is_downconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;

	ir_mode *dest_mode = get_irn_mode(node);
	if (!mode_needs_gp_reg(dest_mode))
		return false;
	ir_mode *src_mode = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(src_mode))
		return false;
	return get_mode_size_bits(dest_mode) <= get_mode_size_bits(src_mode);
}

static ir_node *skip_downconv(ir_node *node)
{
	while (is_downconv(node)) {
		if (get_irn_n_edges(node) > 1)
			break;
		node = get_Conv_op(node);
	}
	return node;
}

static bool is_sameconv(const ir_node *node)
{
	if (!is_Conv(node))
		return false;
	ir_mode *dest_mode = get_irn_mode(node);
	if (!mode_needs_gp_reg(dest_mode))
		return false;
	ir_mode *src_mode = get_irn_mode(get_Conv_op(node));
	if (!mode_needs_gp_reg(src_mode))
		return false;
	return get_mode_size_bits(dest_mode) == get_mode_size_bits(src_mode);
}

static ir_node *skip_sameconv(ir_node *node)
{
	while (is_sameconv(node)) {
		if (get_irn_n_edges(node) > 1)
			break;
		node = get_Conv_op(node);
	}
	return node;
}

static ir_node *get_initial_sp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RSP]);
}

static ir_node *get_initial_fp(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_val[REG_RBP]);
}

static ir_node *get_initial_mem(ir_graph *irg)
{
	return be_get_start_proj(irg, &start_mem);
}

static ir_node *get_frame_base(ir_graph *irg)
{
	if (current_cconv->omit_fp) {
		return get_initial_sp(irg);
	} else {
		return get_initial_fp(irg);
	}
}

static amd64_insn_mode_t get_insn_mode_from_mode(const ir_mode *mode)
{
	switch (get_mode_size_bits(mode)) {
	case   8: return INSN_MODE_8;
	case  16: return INSN_MODE_16;
	case  32: return INSN_MODE_32;
	case  64: return INSN_MODE_64;
	case 128: return INSN_MODE_128;
	}
	panic("unexpected mode");
}

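/** Return a read-only global entity holding the value of @p tv. The entity is
 * created on first use and cached in the isa's constants map, so equal tarvals
 * share one entity. */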
ir_entity *create_float_const_entity(ir_graph *const irg,
                                     ir_tarval *const tv)
{
	const arch_env_t *arch_env = be_get_irg_arch_env(irg);
	amd64_isa_t      *isa      = (amd64_isa_t*) arch_env;
	ir_entity        *entity   = pmap_get(ir_entity, isa->constants, tv);
	if (entity != NULL)
		return entity;

	ir_mode *mode = get_tarval_mode(tv);
	ir_type *type = get_type_for_mode(mode);
	ir_type *glob = get_glob_type();

	entity = new_entity(glob, id_unique("C%u"), type);
	set_entity_visibility(entity, ir_visibility_private);
	add_entity_linkage(entity, IR_LINKAGE_CONSTANT);

	ir_initializer_t *initializer = create_initializer_tarval(tv);
	set_entity_initializer(entity, initializer);

	pmap_insert(isa->constants, tv, entity);
	return entity;
}

typedef enum reference_mode_t {
	REFERENCE_DIRECT,
	REFERENCE_IP_RELATIVE,
	REFERENCE_GOT,
} reference_mode_t;

static reference_mode_t need_relative_addressing(const ir_entity *entity)
{
	if (!be_options.pic)
		return REFERENCE_DIRECT;

	/* simply everything is instruction pointer relative, external functions
	 * use a global offset table */
	return entity_has_definition(entity)
	   && (get_entity_linkage(entity) & IR_LINKAGE_MERGE) == 0
	    ? REFERENCE_IP_RELATIVE : REFERENCE_GOT;
}

static ir_node *create_float_const(dbg_info *dbgi, ir_node *block,
                                   ir_tarval *tv)
{
	ir_graph  *irg     = get_irn_irg(block);
	ir_mode   *tv_mode = get_tarval_mode(tv);
	ir_entity *entity  = create_float_const_entity(irg, tv);
	ir_node   *nomem   = get_irg_no_mem(irg);

	ir_node *in[] = { nomem };
	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	addr.immediate.entity       = entity;
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(tv_mode);

	addr.index_input = NO_INPUT;
	if (need_relative_addressing(entity) == REFERENCE_DIRECT) {
		addr.base_input = NO_INPUT;
	} else {
		assert(need_relative_addressing(entity) == REFERENCE_IP_RELATIVE);
		addr.base_input = RIP_INPUT;
	}

	ir_node *load;
	unsigned pn_res;
	if (insn_mode == INSN_MODE_128) {
		load = new_bd_amd64_movdqa(dbgi, block, ARRAY_SIZE(in), in,
		                           AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movdqa_res;
	} else {
		load = new_bd_amd64_movs_xmm(dbgi, block, ARRAY_SIZE(in), in,
		                             insn_mode, AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movs_xmm_res;
	}
	arch_set_irn_register_reqs_in(load, mem_reqs);
	set_irn_pinned(load, op_pin_state_floats);

	return new_r_Proj(load, amd64_mode_xmm, pn_res);
}

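/** Produce a tarval of @p mode whose bit pattern has only the sign bit set. */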
ir_tarval *create_sign_tv(ir_mode *mode)
{
	unsigned size = get_mode_size_bits(mode);
	assert(size == 32 || size == 64 || size == 128);
	ir_mode *intmode = size == 128 ? amd64_mode_xmm
	                 : size == 64  ? mode_Lu
	                               : mode_Iu;
	ir_tarval *one  = get_mode_one(intmode);
	ir_tarval *sign = tarval_shl_unsigned(one, size-1);
	return tarval_bitcast(sign, mode);
}

static ir_node *gen_Const(ir_node *node)
{
	ir_node  *block = be_transform_node(get_nodes_block(node));
	dbg_info *dbgi  = get_irn_dbg_info(node);
	ir_mode  *mode  = get_irn_mode(node);
	ir_tarval *tv = get_Const_tarval(node);

	if (!mode_needs_gp_reg(mode)) {
		if (tarval_is_null(tv)) {
			return new_bd_amd64_xorpd_0(dbgi, block);
		}

		return create_float_const(dbgi, block, tv);
	}

	uint64_t val = get_tarval_uint64(tv);
	amd64_insn_mode_t imode = val > UINT32_MAX ? INSN_MODE_64 : INSN_MODE_32;
	return new_bd_amd64_mov_imm(dbgi, block, imode, val, NULL);
}

static ir_node *gen_Address(ir_node *node)
{
	ir_node   *block  = be_transform_node(get_nodes_block(node));
	dbg_info  *dbgi   = get_irn_dbg_info(node);
	ir_entity *entity = get_Address_entity(node);

	/* do we need RIP-relative addressing because of PIC? */
	reference_mode_t mode = need_relative_addressing(entity);
	if (mode == REFERENCE_DIRECT)
		return new_bd_amd64_mov_imm(dbgi, block, INSN_MODE_64, 0, entity);

	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));
	addr.base_input  = NO_INPUT;
	addr.index_input = NO_INPUT;
	addr.mem_input   = NO_INPUT;

	if (mode == REFERENCE_IP_RELATIVE) {
		addr.base_input       = RIP_INPUT;
		addr.immediate.entity = entity;
		return new_bd_amd64_lea(dbgi, block, 0, NULL, INSN_MODE_64, addr);
	} else {
		assert(mode == REFERENCE_GOT);
		addr.immediate.entity = new_got_entry_entity(entity);
		ir_node *load = new_bd_amd64_mov_gp(dbgi, block, 0, NULL, INSN_MODE_64,
		                                    AMD64_OP_ADDR, addr);
		return new_r_Proj(load, mode_gp, pn_amd64_mov_gp_res);
	}
}

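/** Create an IncSP node adjusting the stack pointer by @p offset; the node is
 * marked as modifying the flags register. */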
ir_node *amd64_new_IncSP(ir_node *block, ir_node *old_sp, int offset,
                         unsigned align)
{
	ir_node *incsp = be_new_IncSP(&amd64_registers[REG_RSP], block, old_sp,
	                              offset, align);
	arch_add_irn_flags(incsp, arch_irn_flag_modify_flags);
	return incsp;
}

typedef ir_node *(*construct_binop_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], const amd64_binop_addr_attr_t *attr_init);

typedef ir_node *(*construct_rax_binop_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], amd64_insn_mode_t insn_mode,
	amd64_op_mode_t op_mode, amd64_addr_t addr);

typedef enum match_flags_t {
	match_am           = 1 << 0,
	match_mode_neutral = 1 << 1,
	match_immediate    = 1 << 2,
	match_commutative  = 1 << 3,
} match_flags_t;

typedef struct amd64_args_t {
	amd64_binop_addr_attr_t     attr;
	ir_node                    *mem_proj;
	ir_node                    *in[4];
	int                         arity;
	const arch_register_req_t **reqs;
} amd64_args_t;

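/** Try to encode @p op as a 32-bit immediate: returns true and fills @p imm if
 * @p op is a constant (or, when allowed, an address) that fits the encoding. */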
static bool match_immediate_32(amd64_imm32_t *imm, const ir_node *op,
                               bool can_match_ip_relative,
                               bool upper32_dont_care)
{
	assert(mode_needs_gp_reg(get_irn_mode(op)));
	assert(imm->offset == 0 && imm->entity == NULL);
	if (is_Const(op)) {
		ir_tarval *tv = get_Const_tarval(op);
		if (!tarval_is_long(tv))
			return false;
		long    lval = get_tarval_long(tv);
		int32_t val  = (int32_t)lval;
		if ((long)val != lval)
			return false;
		/* the immediate value is sign-extended to 64 bits, sometimes
		 * this is not what we want. */
		if (!upper32_dont_care && val < 0
		    && !mode_is_signed(get_tarval_mode(tv)))
		    return false;
		imm->offset = val;
		return true;
	} else if (can_match_ip_relative && is_Address(op)) {
		/* TODO: check if entity is in lower 4GB address space/relative */
		ir_entity *entity = get_Address_entity(op);
		imm->entity = entity;
		return true;
	}
	/* TODO: SymConst, Add(SymConst, Const) ... */
	return false;
}

static ir_heights_t *heights;

static bool input_depends_on_load(ir_node *load, ir_node *input)
{
	ir_node *block = get_nodes_block(load);
	/* if the dependency is in another block, then we ignore it for now
	   as we only match address mode loads in the same block. */
	return get_nodes_block(input) == block
	    && heights_reachable_in_block(heights, input, load);
}

static void fix_node_mem_proj(ir_node *node, ir_node *mem_proj)
{
	if (mem_proj == NULL)
		return;

	ir_node *load = get_Proj_pred(mem_proj);
	be_set_transformed_node(load, node);
}

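/** Check whether @p node is the result of a Load in @p block that could be
 * folded into the user as a source address mode; returns the Load or NULL. */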
static ir_node *source_am_possible(ir_node *block, ir_node *node)
{
	if (!is_Proj(node))
		return NULL;
	ir_node *load = get_Proj_pred(node);
	if (!is_Load(load))
		return NULL;
	assert(get_Proj_num(node) == pn_Load_res);
	if (get_nodes_block(load) != block)
		return NULL;
	/* make sure we are the only user */
	if (get_irn_n_edges(node) != 1)
		return NULL;
	/* ia32 backend claims this can happen, use an assert for now and see
	 * if we hit it :) */
	assert(!be_is_transformed(node));
	return load;
}

static bool needs_extension(ir_node *op)
{
	ir_mode *mode = get_irn_mode(op);
	if (get_mode_size_bits(mode) >= 32)
		return false;
	return !be_upper_bits_clean(op, mode);
}

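/** Create a value consisting entirely of the sign bit of @p node: an
 * arithmetic right shift by (mode size - 1), i.e. 0 for non-negative and all
 * ones for negative values. */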
static ir_node *create_sext(ir_node *block, ir_node *const node, ir_mode *mode)
{
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(mode);
	dbg_info *const   dbgi      = get_irn_dbg_info(node);
	ir_node  *const   new_node  = be_transform_node(node);
	ir_node  *const   new_block = be_transform_node(block);

	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	attr.base.op_mode = AMD64_OP_SHIFT_IMM;
	attr.insn_mode    = insn_mode;
	attr.immediate    = get_mode_size_bits(mode) - 1;
	ir_node *in[1]    = { new_node };
	ir_node *sar      = new_bd_amd64_sar(dbgi, new_block, ARRAY_SIZE(in),
	                                     in, &attr);

	arch_set_irn_register_reqs_in(sar, reg_reqs);
	arch_set_irn_register_req_out(sar, 0, &amd64_requirement_gp_same_0);
	return new_r_Proj(sar, mode_gp, pn_amd64_sar_res);
}

static ir_node *create_zext(ir_node *block, ir_node *const node)
{
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node  *const xor0      = new_bd_amd64_xor_0(dbgi, new_block);
	arch_set_irn_register_reqs_in(xor0, reg_reqs);
	return new_r_Proj(xor0, mode_gp, pn_amd64_xor_0_res);
}

static bool val_input(unsigned in)
{
	return in != NO_INPUT && in != RIP_INPUT;
}

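/** Decide whether one of the two operands can be folded as a memory operand
 * (source address mode). On success *out_load is the Load to fold and *out_op
 * the remaining register operand. */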
static bool use_address_matching(ir_mode *mode, match_flags_t flags,
                                 ir_node *block,
                                 ir_node *op1, ir_node *op2,
                                 ir_node **out_load, ir_node **out_op)
{
	if (! (flags & match_am))
		return false;

	unsigned mode_bits = get_mode_size_bits(mode);
	if (mode_bits == 8 || mode_bits == 16)
		return false;

	ir_node *load2 = source_am_possible(block, op2);
	if (load2 != NULL && !input_depends_on_load(load2, op1)) {
		(*out_load) = load2;
		(*out_op)   = op1;
		return true;
	}

	if (flags & match_commutative) {
		ir_node *load1 = source_am_possible(block, op1);
		if (load1 != NULL && !input_depends_on_load(load1, op2)) {
			(*out_load) = load1;
			(*out_op)   = op2;
			return true;
		}
	}
	return false;
}

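/** Decompose @p ptr into an amd64 address mode: fills @p addr and appends the
 * transformed base/index nodes to @p in, increasing @p arity accordingly. */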
static void perform_address_matching(ir_node *ptr, int *arity,
                                     ir_node **in, amd64_addr_t *addr)
{
	x86_address_t maddr;
	memset(&maddr, 0, sizeof(maddr));
	x86_create_address_mode(&maddr, ptr, x86_create_am_normal);

	if (maddr.base != NULL) {
		int base_input   = (*arity)++;
		addr->base_input = base_input;
		in[base_input]   = be_transform_node(maddr.base);
	} else {
		ir_entity *entity = maddr.entity;
		if (entity != NULL
		    && need_relative_addressing(entity) != REFERENCE_DIRECT) {
		    addr->base_input = RIP_INPUT;
		} else {
			addr->base_input = NO_INPUT;
		}
	}
	if (maddr.index != NULL) {
		int index_input = (*arity)++;
		addr->index_input = index_input;
		in[index_input]  = be_transform_node(maddr.index);
	} else {
		addr->index_input = NO_INPUT;
	}
	if (maddr.frame_entity != NULL) {
		assert(maddr.entity == NULL);
		addr->immediate.entity = maddr.frame_entity;
		/* not supported yet */
		assert(!is_parameter_entity(maddr.frame_entity)
		       || get_entity_parameter_number(maddr.frame_entity)
		          != IR_VA_START_PARAMETER_NUMBER);
	} else {
		addr->immediate.entity = maddr.entity;
	}
	addr->immediate.offset = maddr.offset;
	addr->log_scale        = maddr.scale;
}

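/** Prepare inputs, attributes and register requirements for a binary
 * operation, folding an immediate or a memory operand where @p flags allow. */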
static void match_binop(amd64_args_t *args, ir_node *block,
                        ir_mode *mode, ir_node *op1, ir_node *op2,
                        match_flags_t flags)
{
	memset(args, 0, sizeof(*args));

	bool use_am;
	bool use_xmm       = mode_is_float(mode);
	bool use_immediate = flags & match_immediate;
	bool mode_neutral  = flags & match_mode_neutral;

	args->attr.base.insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;

	use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	if (use_immediate
	    && match_immediate_32(&args->attr.u.immediate, op2, false, mode_neutral)) {
		assert(!use_xmm && "Can't (yet) match binop with xmm immediate");
		/* fine, we found an immediate */
		args->attr.base.base.op_mode = AMD64_OP_REG_IMM;
		args->in[args->arity++]      = be_transform_node(op1);
		args->reqs                   = reg_reqs;
	} else if (use_am) {
		ir_node *new_op        = be_transform_node(op);
		int      reg_input     = args->arity++;
		args->attr.u.reg_input = reg_input;
		args->in[reg_input]    = new_op;
		amd64_addr_t *addr     = &args->attr.base.addr;

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &(args->arity), args->in, addr);

		args->reqs = use_xmm ? xmm_mem_reqs : reg_mem_reqs;
		if (addr->base_input != NO_INPUT && addr->index_input != NO_INPUT) {
			args->reqs = use_xmm ? xmm_reg_reg_mem_reqs
			             : reg_reg_reg_mem_reqs;
		} else if (addr->base_input != NO_INPUT || addr->index_input != NO_INPUT) {
			args->reqs = use_xmm ? xmm_reg_mem_reqs
			             : reg_reg_mem_reqs;
		}
		ir_node *new_mem    = be_transform_node(get_Load_mem(load));
		int mem_input       = args->arity++;
		args->in[mem_input] = new_mem;
		addr->mem_input     = mem_input;

		args->mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		args->attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		/* simply transform the arguments */
		args->in[args->arity++] = be_transform_node(op1);
		args->in[args->arity++] = be_transform_node(op2);
		args->attr.base.base.op_mode = AMD64_OP_REG_REG;
		args->reqs = use_xmm ? xmm_xmm_reqs : reg_reg_reqs;
	}
}

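/** Generic transformation for two-operand instructions: matches immediates and
 * address modes via match_binop() and returns the result Proj (gp or xmm). */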
static ir_node *gen_binop_am(ir_node *node, ir_node *op1, ir_node *op2,
                             construct_binop_func func, unsigned pn_res,
                             match_flags_t flags)
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(node);
	amd64_args_t args;
	match_binop(&args, block, mode, op1, op2, flags);

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);

	ir_node *new_node = func(dbgi, new_block, args.arity, args.in, &args.attr);
	arch_set_irn_register_reqs_in(new_node, args.reqs);

	fix_node_mem_proj(new_node, args.mem_proj);

	if (mode_is_float(mode)) {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_xmm_same_0);
		return new_r_Proj(new_node, amd64_mode_xmm, pn_res);
	} else {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_gp_same_0);
		return new_r_Proj(new_node, mode_gp, pn_res);
	}
}

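/** Transform a binary operation whose first operand is constrained to rax
 * (used e.g. for one-operand imul, see gen_Mul below), optionally folding one
 * operand as a memory address mode. */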
static ir_node *gen_binop_rax(ir_node *node, ir_node *op1, ir_node *op2,
                              construct_rax_binop_func make_node,
                              match_flags_t flags)
{
	bool use_am;
	bool mode_neutral  = flags & match_mode_neutral;
	assert(! (flags & match_immediate));

	ir_mode *mode = get_irn_mode(op1);
	amd64_insn_mode_t insn_mode = get_insn_mode_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		op1 = skip_downconv(op1);
		op2 = skip_downconv(op2);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;
	ir_node *block = get_nodes_block(node);
	ir_node *in[4];
	int      arity = 0;
	const arch_register_req_t **reqs;
	amd64_op_mode_t op_mode;
	amd64_addr_t    addr;
	memset(&addr, 0, sizeof(addr));

	use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *mem_proj = NULL;
	if (use_am) {
		ir_node *new_op    = be_transform_node(op);
		int      reg_input = arity++;
		in[reg_input]      = new_op;

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &arity, in, &addr);

		reqs = reg_mem_reqs;
		if (addr.base_input != NO_INPUT && addr.index_input != NO_INPUT) {
			reqs = reg_reg_reg_mem_reqs;
		} else if (addr.base_input != NO_INPUT || addr.index_input != NO_INPUT) {
			reqs = reg_reg_mem_reqs;
		}

		ir_node *new_mem = be_transform_node(get_Load_mem(load));
		int mem_input    = arity++;
		in[mem_input]    = new_mem;
		addr.mem_input   = mem_input;

		mem_proj                = get_Proj_for_pn(load, pn_Load_M);
		op_mode                 = AMD64_OP_RAX_ADDR;
	} else {
		/* simply transform the arguments */
		in[arity++] = be_transform_node(op1);
		in[arity++] = be_transform_node(op2);
		reqs        = rax_reg_reqs;
		op_mode     = AMD64_OP_RAX_REG;
	}

	assert((size_t)arity <= ARRAY_SIZE(in));
	dbg_info *dbgi      = get_irn_dbg_info(node);
	ir_node  *new_block = be_transform_node(block);
	ir_node  *new_node  = make_node(dbgi, new_block, arity, in, insn_mode,
	                                op_mode, addr);
	arch_set_irn_register_reqs_in(new_node, reqs);
	if (mem_proj != NULL) {
		be_set_transformed_node(load, new_node);
	}
	return new_node;
}

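/** Transform a binary xmm (SSE) operation, optionally folding one operand as a
 * memory address mode; the result shares a register with the first input. */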
static ir_node *gen_binop_xmm(ir_node *node, ir_node *op0, ir_node *op1,
                              construct_binop_func make_node,
                              match_flags_t flags)
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(op0);
	amd64_args_t args;
	memset(&args, 0, sizeof(args));

	ir_node *load;
	ir_node *op;
	bool use_am = use_address_matching(mode, flags, block, op0, op1, &load,
	                                   &op);

	if (use_am) {
		int reg_input = args.arity++;
		args.attr.u.reg_input = reg_input;
		args.in[reg_input]    = be_transform_node(op);

		amd64_addr_t *addr = &args.attr.base.addr;
		ir_node      *ptr  = get_Load_ptr(load);
		perform_address_matching(ptr, &args.arity, args.in, addr);

		unsigned reg_count
			= val_input(addr->base_input) + val_input(addr->index_input);
		args.reqs = reg_count == 0 ? xmm_mem_reqs :
		            reg_count == 1 ? xmm_reg_mem_reqs
		                           : xmm_reg_reg_mem_reqs;

		ir_node *new_mem   = be_transform_node(get_Load_mem(load));
		int mem_input      = args.arity++;
		args.in[mem_input] = new_mem;
		addr->mem_input    = mem_input;

		args.mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		args.attr.base.base.op_mode = AMD64_OP_ADDR_REG;
	} else {
		args.in[args.arity++] = be_transform_node(op0);
		args.in[args.arity++] = be_transform_node(op1);
		args.attr.base.base.op_mode = AMD64_OP_REG_REG;
		args.reqs = xmm_xmm_reqs;
	}

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node *new_node = make_node(dbgi, new_block, args.arity, args.in,
	                              &args.attr);
	arch_set_irn_register_reqs_in(new_node, args.reqs);

	fix_node_mem_proj(new_node, args.mem_proj);

	arch_set_irn_register_req_out(new_node, 0,
								  &amd64_requirement_xmm_same_0);
	return new_r_Proj(new_node, amd64_mode_xmm, pn_amd64_subs_res);
}

typedef ir_node *(*construct_shift_func)(dbg_info *dbgi, ir_node *block,
	int arity, ir_node *in[], const amd64_shift_attr_t *attr_init);

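/** Transform a shift operation: a constant shift count becomes an immediate,
 * otherwise the count input is constrained to rcx and the result register must
 * differ from it. */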
static ir_node *gen_shift_binop(ir_node *node, ir_node *op1, ir_node *op2,
                                construct_shift_func func, unsigned pn_res,
                                match_flags_t flags)
{
	ir_mode *mode = get_irn_mode(node);
	assert(!mode_is_float(mode));

	if (get_mode_modulo_shift(mode) != 32 && get_mode_size_bits(mode) != 64)
		panic("unsupported modulo shift used");

	ir_node *in[3];
	int      arity = 0;
	if (flags & match_mode_neutral) {
		op1 = skip_downconv(op1);
		in[arity++] = be_transform_node(op1);
		mode = get_mode_size_bits(mode) > 32 ? mode_gp : mode_Iu;
	} else {
		op1 = skip_sameconv(op1);

		/* Use 8/16bit operations instead of doing zext/upconv */
		in[arity++] = be_transform_node(op1);
	}

	/* we can skip any convs for the shift count, as it only uses the lowest
	 * 5/6 bits anyway */
	while (is_Conv(op2) && get_irn_n_edges(op2) == 1) {
		ir_node *const op = get_Conv_op(op2);
		if (get_mode_arithmetic(get_irn_mode(op)) != irma_twos_complement)
			break;
		op2 = op;
	}

	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	const arch_register_req_t **reqs;
	const arch_register_req_t  *out_req0;
	if (is_Const(op2)) {
		attr.base.op_mode = AMD64_OP_SHIFT_IMM;
		reqs              = reg_reqs;
		out_req0          = &amd64_requirement_gp_same_0;
		attr.immediate    = get_Const_long(op2);
	} else {
		attr.base.op_mode = AMD64_OP_SHIFT_REG;
		in[arity++]       = be_transform_node(op2);
		reqs              = reg_rcx_reqs;
		out_req0          = &amd64_requirement_gp_same_0_not_1;
	}
	attr.insn_mode = get_insn_mode_from_mode(mode);

	ir_node  *const block     = get_nodes_block(node);
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node  *const new_node  = func(dbgi, new_block, arity, in, &attr);
	arch_set_irn_register_reqs_in(new_node, reqs);
	arch_set_irn_register_req_out(new_node, 0, out_req0);
	return new_r_Proj(new_node, mode_gp, pn_res);
}

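/** Implement an integer addition as a lea instruction, using either a
 * base+index address or a base plus 32-bit immediate. */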
static ir_node *create_lea_as_add(ir_node *node, ir_node *op1, ir_node *op2)
{
	dbg_info *const dbgi = get_irn_dbg_info(node);
	ir_node  *block      = get_nodes_block(node);
	ir_node  *new_block  = be_transform_node(block);
	ir_mode *mode        = get_irn_mode(node);

	amd64_insn_mode_t insn_mode;
	if (get_mode_size_bits(mode) <= 32)
		insn_mode = INSN_MODE_32;
	else
		insn_mode = INSN_MODE_64;

	const arch_register_req_t **reqs;
	amd64_addr_t addr;
	memset(&addr, 0, sizeof(addr));

	ir_node *in[2];
	int arity = 0;

	if (match_immediate_32(&addr.immediate, op2, false, true)) {
		in[arity++]      = be_transform_node(op1);
		reqs             = reg_reqs;
		addr.index_input = NO_INPUT;
	} else {
		in[arity++]      = be_transform_node(op1);
		in[arity++]      = be_transform_node(op2);
		addr.base_input  = 0;
		addr.index_input = 1;
		reqs             = reg_reg_reqs;
	}

	ir_node *res = new_bd_amd64_lea(dbgi, new_block, arity, in, insn_mode, addr);
	arch_set_irn_register_reqs_in(res, reqs);
	return res;
}

static ir_node *gen_Add(ir_node *const node)
{
	match_flags_t flags = match_immediate | match_am | match_mode_neutral
	                      | match_commutative;

	ir_node *op1 = get_Add_left(node);
	ir_node *op2 = get_Add_right(node);

	ir_mode *mode  = get_irn_mode(node);
	ir_node *block = get_nodes_block(node);
	ir_node *load, *op;

	if (mode_is_float(mode)) {
		return gen_binop_am(node, op1, op2, new_bd_amd64_adds,
							pn_amd64_adds_res, match_commutative | match_am);
	}

	bool use_am = use_address_matching(mode, flags, block, op1, op2, &load, &op);

	ir_node *res;
	if (use_am)
		res = gen_binop_am(node, op1, op2, new_bd_amd64_add, pn_amd64_add_res,
		                   flags);
	else
		res = create_lea_as_add(node, op1, op2);

	x86_mark_non_am(node);
	return res;
}

static ir_node *gen_Sub(ir_node *const node)
{
	ir_node  *const op1     = get_Sub_left(node);
	ir_node  *const op2     = get_Sub_right(node);
	ir_mode  *const mode    = get_irn_mode(node);

	if (mode_is_float(mode)) {
		return gen_binop_am(node, op1, op2, new_bd_amd64_subs,
		                    pn_amd64_subs_res, match_am);
	} else {
		/* TODO: do not match AM yet until we have a sub->neg+add rule
		 * in amd64_finish */
		return gen_binop_am(node, op1, op2, new_bd_amd64_sub, pn_amd64_sub_res,
		                    match_immediate);
	}
}

static ir_node *gen_And(ir_node *const node)
{
	ir_node *op1 = get_And_left(node);
	ir_node *op2 = get_And_right(node);
	return gen_binop_am(node, op1, op2, new_bd_amd64_and, pn_amd64_and_res,
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
}

static ir_node *gen_Eor(ir_node *const node)
{
	ir_node *op1 = get_Eor_left(node);
	ir_node *op2 = get_Eor_right(node);
	return gen_binop_am(node, op1, op2, new_bd_amd64_xor, pn_amd64_xor_res,
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
}

static ir_node *gen_Or(ir_node *const node)
{
	ir_node *op1 = get_Or_left(node);
	ir_node *op2 = get_Or_right(node);
	return gen_binop_am(node, op1, op2, new_bd_amd64_or, pn_amd64_or_res,
	                    match_immediate | match_am | match_mode_neutral
	                    | match_commutative);
}

static ir_node *gen_Mul(ir_node *const node)
{
	ir_node *op1  = get_Mul_left(node);
	ir_node *op2  = get_Mul_right(node);
	ir_mode *mode = get_irn_mode(node);

	if (get_mode_size_bits(mode) < 16) {
		/* imulb only supports rax - reg form */
		ir_node *new_node =
		            gen_binop_rax(node, op1, op2, new_bd_amd64_imul_1op,