/*
 * This file is part of libFirm.
 * Copyright (C) 2012 University of Karlsruhe.
 */

/**
 * @file
 * @brief   code selection (transform FIRM into amd64 FIRM)
 */
Matthias Braun's avatar
Matthias Braun committed
10
#include "debug.h"
#include "panic.h"
#include "heights.h"
#include "ircons.h"
#include "iredges_t.h"
#include "irgmod.h"
#include "irgraph_t.h"
#include "irmode_t.h"
#include "irnode_t.h"
#include "iropt_t.h"
#include "irprog_t.h"
#include "tv_t.h"
#include "util.h"

#include "benode.h"
#include "betranshlp.h"
#include "bearch_amd64_t.h"
#include "beirg.h"
#include "besched.h"

#include "amd64_new_nodes.h"
#include "amd64_nodes_attr.h"
#include "amd64_transform.h"
#include "amd64_varargs.h"
#include "../ia32/x86_address_mode.h"
#include "../ia32/x86_cconv.h"

#include "gen_amd64_regalloc_if.h"

DEBUG_ONLY(static firm_dbg_module_t *dbg = NULL;)

41
42
43
/* Machine mode used for general-purpose register values.
 * NOTE(review): initialized outside this chunk — confirm setup site. */
static ir_mode        *mode_gp;
/* Calling convention of the graph currently being transformed. */
static x86_cconv_t    *current_cconv = NULL;
/* Backend stack bookkeeping state for the current graph. */
static be_stack_env_t  stack_env;
44

45
46
47
/** we don't have a concept of aliasing registers, so enumerate them
 * manually for the asm nodes.
 *
 * Maps the 8/16/32-bit sub-register names that may appear in inline-asm
 * clobber lists onto the full 64-bit register they alias.  Terminated by
 * a { NULL, ~0u } sentinel entry. */
const x86_clobber_name_t amd64_additional_clobber_names[] = {
	{ "al", REG_RAX }, { "ah", REG_RAX }, { "ax", REG_RAX }, { "eax", REG_RAX },
	{ "bl", REG_RBX }, { "bh", REG_RBX }, { "bx", REG_RBX }, { "ebx", REG_RBX },
	{ "cl", REG_RCX }, { "ch", REG_RCX }, { "cx", REG_RCX }, { "ecx", REG_RCX },
	{ "dl", REG_RDX }, { "dh", REG_RDX }, { "dx", REG_RDX }, { "edx", REG_RDX },
	{ "sil",  REG_RSI }, { "si",   REG_RSI }, { "esi",  REG_RSI },
	{ "dil",  REG_RDI }, { "di",   REG_RDI }, { "edi",  REG_RDI },
	{ "bpl",  REG_RBP }, { "bp",   REG_RBP }, { "ebp",  REG_RBP },
	{ "spl",  REG_RSP }, { "sp",   REG_RSP }, { "esp",  REG_RSP },
	{ "r8b",  REG_R8  }, { "r8w",  REG_R8  }, { "r8d",  REG_R8  },
	{ "r9b",  REG_R9  }, { "r9w",  REG_R9  }, { "r9d",  REG_R9  },
	{ "r10b", REG_R10 }, { "r10w", REG_R10 }, { "r10d", REG_R10 },
	{ "r11b", REG_R11 }, { "r11w", REG_R11 }, { "r11d", REG_R11 },
	{ "r12b", REG_R12 }, { "r12w", REG_R12 }, { "r12d", REG_R12 },
	{ "r13b", REG_R13 }, { "r13w", REG_R13 }, { "r13d", REG_R13 },
	{ "r14b", REG_R14 }, { "r14w", REG_R14 }, { "r14d", REG_R14 },
	{ "r15b", REG_R15 }, { "r15w", REG_R15 }, { "r15d", REG_R15 },
	{ NULL, ~0u }
};

#define GP &amd64_reg_classes[CLASS_amd64_gp]
/** Supported GCC-style inline-asm constraint letters, indexed by the
 * constraint character.  Each entry names the match kind (register,
 * immediate, memory, any), the register class, and — for MATCH_REG — a
 * bitmask limiting the allowed registers of that class. */
const x86_asm_constraint_list_t amd64_asm_constraints = {
	['A'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RDX },
	['D'] = { MATCH_REG, GP, 1 << REG_GP_RDI },
	['I'] = { MATCH_IMM, GP, 0 },
	['J'] = { MATCH_IMM, GP, 0 },
	['K'] = { MATCH_IMM, GP, 0 },
	['L'] = { MATCH_IMM, GP, 0 },
	['M'] = { MATCH_IMM, GP, 0 },
	['N'] = { MATCH_IMM, GP, 0 },
	['O'] = { MATCH_IMM, GP, 0 },
	['R'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP | 1 << REG_GP_RSP },
	['S'] = { MATCH_REG, GP, 1 << REG_GP_RSI },
	['Q'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX },
	['V'] = { MATCH_MEM, GP, 0 },
	['X'] = { MATCH_ANY, GP, 0 },
	['a'] = { MATCH_REG, GP, 1 << REG_GP_RAX },
	['b'] = { MATCH_REG, GP, 1 << REG_GP_RBX },
	['c'] = { MATCH_REG, GP, 1 << REG_GP_RCX },
	['d'] = { MATCH_REG, GP, 1 << REG_GP_RDX },
	['e'] = { MATCH_IMM, GP, 0 },
	['g'] = { MATCH_ANY, GP, 0 },
	['i'] = { MATCH_IMM, GP, 0 },
	['l'] = { MATCH_REG, GP, 1 << REG_GP_RAX | 1 << REG_GP_RBX
		| 1 << REG_GP_RCX | 1 << REG_GP_RDX | 1 << REG_GP_RSI
		| 1 << REG_GP_RDI | 1 << REG_GP_RBP },
	['m'] = { MATCH_MEM, GP, 0 },
	['n'] = { MATCH_IMM, GP, 0 },
	['o'] = { MATCH_MEM, GP, 0 },
	['p'] = { MATCH_REG, GP, 0 },
	['q'] = { MATCH_REG, GP, 0 },
	['r'] = { MATCH_REG, GP, 0 },
	['x'] = { MATCH_REG, &amd64_reg_classes[CLASS_amd64_xmm], 0 },

	// see comments in ia32_transform.c about unimplemented stuff.
};
#undef GP

Matthias Braun's avatar
Matthias Braun committed
108
109
110
#define BIT(x)    (1u << x)

/* Result must be assigned the same gp register as input 0
 * (two-address-code constraint of most x86 instructions). */
static const arch_register_req_t amd64_requirement_gp_same_0 = {
	.cls               = &amd64_reg_classes[CLASS_amd64_gp],
	.should_be_same    = BIT(0),
	.width             = 1,
};

/* Result must be assigned the same xmm register as input 0. */
static const arch_register_req_t amd64_requirement_xmm_same_0 = {
	.cls               = &amd64_reg_classes[CLASS_amd64_xmm],
	.should_be_same    = BIT(0),
	.width             = 1,
};

/* Result = input 0's register, but must differ from input 1's. */
static const arch_register_req_t amd64_requirement_gp_same_0_not_1 = {
	.cls               = &amd64_reg_classes[CLASS_amd64_gp],
	.should_be_same    = BIT(0),
	.must_be_different = BIT(1),
	.width             = 1,
};

/* xmm variant of same_0_not_1. */
static const arch_register_req_t amd64_requirement_xmm_same_0_not_1 = {
	.cls               = &amd64_reg_classes[CLASS_amd64_xmm],
	.should_be_same    = BIT(0),
	.must_be_different = BIT(1),
	.width             = 1,
};

/* x87 input whose value is destroyed by the instruction (kills_value). */
static const arch_register_req_t amd64_requirement_x87killed = {
	.cls         = &amd64_reg_classes[CLASS_amd64_x87],
	.width       = 1,
	.kills_value = true,
};

Matthias Braun's avatar
Matthias Braun committed
142
/* Input requirement arrays used when constructing amd64 nodes.  The
 * array name lists the register operands in input order; a trailing
 * "mem" denotes the memory dependency input. */

static const arch_register_req_t *mem_reqs[] = {
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *rsp_mem_reqs[] = {
	&amd64_single_reg_req_gp_rsp,
	&arch_memory_requirement,
};

arch_register_req_t const *rsp_reg_mem_reqs[] = {
	&amd64_single_reg_req_gp_rsp,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *xmm_mem_reqs[] = {
	&amd64_class_reg_req_xmm,
	&arch_memory_requirement,
};

static const arch_register_req_t *x87K_mem_reqs[] = {
	&amd64_requirement_x87killed,
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_reg_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

arch_register_req_t const *xmm_reg_mem_reqs[] = {
	&amd64_class_reg_req_xmm,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *x87_reg_mem_reqs[] = {
	&amd64_class_reg_req_x87,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *x87K_reg_mem_reqs[] = {
	&amd64_requirement_x87killed,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_reg_reg_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *xmm_reg_reg_mem_reqs[] = {
	&amd64_class_reg_req_xmm,
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *x87K_reg_reg_mem_reqs[] = {
	&amd64_requirement_x87killed,
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_flags_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_flags,
};

arch_register_req_t const *amd64_reg_reg_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
};

static const arch_register_req_t *reg_rax_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_single_reg_req_gp_rax,
};

static const arch_register_req_t *reg_rax_rdx_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_single_reg_req_gp_rax,
	&amd64_single_reg_req_gp_rdx,
	&arch_memory_requirement,
};

static const arch_register_req_t *rax_reg_mem_reqs[] = {
	&amd64_single_reg_req_gp_rax,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_rax_reg_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_single_reg_req_gp_rax,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

static const arch_register_req_t *reg_reg_rax_reg_mem_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_class_reg_req_gp,
	&amd64_single_reg_req_gp_rax,
	&amd64_class_reg_req_gp,
	&arch_memory_requirement,
};

arch_register_req_t const *reg_reqs[] = {
	&amd64_class_reg_req_gp,
};

arch_register_req_t const *amd64_xmm_reqs[] = {
	&amd64_class_reg_req_xmm,
};

static const arch_register_req_t *reg_rcx_reqs[] = {
	&amd64_class_reg_req_gp,
	&amd64_single_reg_req_gp_rcx,
};

static const arch_register_req_t *no_reqs[] = {
};

arch_register_req_t const *amd64_xmm_xmm_reqs[] = {
	&amd64_class_reg_req_xmm,
	&amd64_class_reg_req_xmm,
};

/* Address-mode requirement tables, indexed by the number of register
 * inputs used so far (see match_binop): entry i has i register inputs
 * followed by the memory input. */
arch_register_req_t const **const gp_am_reqs[] = {
	mem_reqs,
	reg_mem_reqs,
	reg_reg_mem_reqs,
	reg_reg_reg_mem_reqs,
};

static arch_register_req_t const **const xmm_am_reqs[] = {
	mem_reqs,
	xmm_mem_reqs,
	xmm_reg_mem_reqs,
	xmm_reg_reg_mem_reqs,
};

static arch_register_req_t const **const x87K_am_reqs[] = {
	mem_reqs,
	x87K_mem_reqs,
	x87K_reg_mem_reqs,
	x87K_reg_reg_mem_reqs,
};

Matthias Braun's avatar
Matthias Braun committed
302
303
/** Whether values of @p mode are kept in general-purpose registers:
 * two's-complement integer modes, except the 128-bit amd64_mode_xmm. */
static inline bool mode_needs_gp_reg(ir_mode *mode)
{
	if (get_mode_arithmetic(mode) != irma_twos_complement)
		return false;
	/* mode_xmm is 128bit int at the moment */
	return mode != amd64_mode_xmm;
}

308
309
/** Returns the Start-block Proj representing the initial stack pointer
 * (RSP) of graph @p irg. */
static ir_node *get_initial_sp(ir_graph *irg)
{
	return be_get_Start_proj(irg, &amd64_registers[REG_RSP]);
}

/** Returns the Start-block Proj representing the initial frame pointer
 * (RBP) of graph @p irg. */
static ir_node *get_initial_fp(ir_graph *irg)
{
	return be_get_Start_proj(irg, &amd64_registers[REG_RBP]);
}

/** Returns the node used as base for frame-relative addressing: the
 * stack pointer when the frame pointer is omitted, else the frame
 * pointer. */
static ir_node *get_frame_base(ir_graph *irg)
{
	return current_cconv->omit_fp ? get_initial_sp(irg)
	                              : get_initial_fp(irg);
}
326

327
static amd64_insn_size_t get_insn_size_from_mode(const ir_mode *mode)
328
329
{
	switch (get_mode_size_bits(mode)) {
330
331
332
333
334
335
	case   8: return INSN_SIZE_8;
	case  16: return INSN_SIZE_16;
	case  32: return INSN_SIZE_32;
	case  64: return INSN_SIZE_64;
	case  80: return INSN_SIZE_80;
	case 128: return INSN_SIZE_128;
336
	}
337
	panic("unexpected mode %+F", mode);
338
339
}

340
/** Returns a private, constant global entity holding the float value
 * @p tv, creating and caching it in amd64_constants on first use. */
ir_entity *create_float_const_entity(ir_tarval *const tv)
{
	/* TODO: share code with ia32 backend */
	ir_entity *entity = pmap_get(ir_entity, amd64_constants, tv);
	if (entity != NULL)
		return entity;

	/* x86 long double needs the special 80-bit type. */
	ir_mode *mode = get_tarval_mode(tv);
	ir_type *type = mode == x86_mode_E ? x86_type_E : get_type_for_mode(mode);
	ir_type *glob = get_glob_type();

	entity = new_global_entity(glob, id_unique("C%u"), type,
	                           ir_visibility_private,
	                           IR_LINKAGE_CONSTANT | IR_LINKAGE_NO_IDENTITY);

	ir_initializer_t *initializer = create_initializer_tarval(tv);
	set_entity_initializer(entity, initializer);

	/* cache so identical constants share one entity */
	pmap_insert(amd64_constants, tv, entity);
	return entity;
}

362
/** Initializes @p addr to reference the literal-constant @p entity:
 * RIP-relative when compiling position-independent code, otherwise an
 * absolute address immediate. */
void init_lconst_addr(amd64_addr_t *addr, ir_entity *entity)
{
	assert(entity_has_definition(entity));
	assert(get_entity_linkage(entity) & IR_LINKAGE_CONSTANT);
	assert(get_entity_visibility(entity) == ir_visibility_private);
	x86_immediate_kind_t kind = be_options.pic_style != BE_PIC_NONE
	                          ? X86_IMM_PCREL : X86_IMM_ADDR;
	*addr = (amd64_addr_t) {
		.immediate = {
			.entity = entity,
			.kind   = kind,
		},
		.variant = kind == X86_IMM_PCREL ? X86_ADDR_RIP : X86_ADDR_JUST_IMM,
	};
}

378
/** Materializes the float constant @p tv as a load from a constant-pool
 * entity: movdqa for 128-bit values, movs_xmm otherwise.  Returns the
 * result Proj of the (unpinned) load. */
static ir_node *create_float_const(dbg_info *dbgi, ir_node *block,
                                   ir_tarval *tv)
{
	ir_graph  *irg     = get_irn_irg(block);
	ir_mode   *tv_mode = get_tarval_mode(tv);
	ir_entity *entity  = create_float_const_entity(tv);
	ir_node   *nomem   = get_irg_no_mem(irg);

	ir_node *in[] = { nomem };
	amd64_addr_t addr;
	init_lconst_addr(&addr, entity);

	ir_node *load;
	unsigned pn_res;
	amd64_insn_size_t size = get_insn_size_from_mode(tv_mode);
	if (size == INSN_SIZE_128) {
		load = new_bd_amd64_movdqa(dbgi, block, ARRAY_SIZE(in), in, mem_reqs, AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movdqa_res;
	} else {
		load = new_bd_amd64_movs_xmm(dbgi, block, ARRAY_SIZE(in), in, mem_reqs, size, AMD64_OP_ADDR, addr);
		pn_res = pn_amd64_movs_xmm_res;
	}
	/* load from constant memory: free to schedule anywhere */
	set_irn_pinned(load, false);

	return be_new_Proj(load, pn_res);
}

/** Builds a tarval of @p mode with only the sign bit set (bit size-1),
 * e.g. for use as a sign-flip mask.  Supports 32/64/128-bit modes. */
ir_tarval *create_sign_tv(ir_mode *mode)
{
	unsigned size = get_mode_size_bits(mode);
	assert(size == 32 || size == 64 || size == 128);
	/* compute in an unsigned integer mode of matching width */
	ir_mode *intmode = size == 128 ? amd64_mode_xmm
	                 : size == 64  ? mode_Lu
	                               : mode_Iu;
	ir_tarval *one  = get_mode_one(intmode);
	ir_tarval *sign = tarval_shl_unsigned(one, size-1);
	/* reinterpret the bit pattern in the requested mode */
	return tarval_bitcast(sign, mode);
}

417
/** Produces an x87 value for constant @p tv: fldz/fld1 for 0.0/1.0,
 * otherwise an fld from a constant-pool entity (narrowed to float or
 * double when the conversion is lossless, to shrink the entity). */
static ir_node *gen_x87_Const(ir_node *const block, ir_tarval *tv)
{
	/* TODO: avoid code duplication with ia32 backend */
	if (tarval_is_null(tv)) {
		return new_bd_amd64_fldz(NULL, block);
	} else if (tarval_is_one(tv)) {
		return new_bd_amd64_fld1(NULL, block);
	} else {
		ir_mode *mode = get_tarval_mode(tv);
		/* try to reduce the mode to produce smaller sized entities */
		ir_mode *const modes[] = { mode_F, mode_D, NULL };
		for (ir_mode *const *i = modes; *i != NULL; ++i) {
			ir_mode *const to = *i;
			if (tarval_ieee754_can_conv_lossless(tv, to)) {
				tv   = tarval_convert_to(tv, to);
				mode = to;
				break;
			}
		}
		ir_entity *entity = create_float_const_entity(tv);
		ir_graph  *irg    = get_irn_irg(block);
		ir_node   *nomem  = get_irg_no_mem(irg);
		ir_node   *in[1]  = { nomem };
		amd64_addr_t addr;
		init_lconst_addr(&addr, entity);
		amd64_insn_size_t size = get_insn_size_from_mode(mode);
		ir_node *load = new_bd_amd64_fld(NULL, block, ARRAY_SIZE(in), in,
		                                 mem_reqs, size, AMD64_OP_ADDR, addr);
		/* constant load — no need to pin to this block */
		set_irn_pinned(load, false);
		return be_new_Proj(load, pn_amd64_fld_res);
	}
}

Matthias Braun's avatar
Matthias Braun committed
450
/** Transforms a firm Const node.  Floats go to the x87/xmm paths;
 * integers become a mov-immediate (32-bit form when the value fits,
 * since it zero-extends to 64 bit). */
static ir_node *gen_Const(ir_node *const node)
{
	ir_node  *block = be_transform_nodes_block(node);
	dbg_info *dbgi  = get_irn_dbg_info(node);
	ir_mode  *mode  = get_irn_mode(node);
	ir_tarval *tv = get_Const_tarval(node);

	if (!mode_needs_gp_reg(mode)) {
		if (mode == x86_mode_E) {
			return gen_x87_Const(block, tv);
		} else if (tarval_is_null(tv)) {
			/* xmm zero is cheaper as xorpd reg,reg */
			return new_bd_amd64_xorpd_0(dbgi, block);
		}
		return create_float_const(dbgi, block, tv);
	}

	uint64_t val = get_tarval_uint64(tv);
	amd64_insn_size_t imode = val > UINT32_MAX ? INSN_SIZE_64 : INSN_SIZE_32;
	amd64_imm64_t const imm = {
		.kind   = X86_IMM_VALUE,
		.offset = val,
	};
	return new_bd_amd64_mov_imm(dbgi, block, imode, &imm);
}

Matthias Braun's avatar
Matthias Braun committed
475
/** Transforms an Address node into a mov of the entity's 64-bit
 * absolute address. */
static ir_node *gen_Address(ir_node *const node)
{
	dbg_info *const dbgi  = get_irn_dbg_info(node);
	ir_node  *const block = be_transform_nodes_block(node);

	amd64_imm64_t const imm = {
		.kind   = X86_IMM_ADDR,
		.entity = get_Address_entity(node),
	};
	return new_bd_amd64_mov_imm(dbgi, block, INSN_SIZE_64, &imm);
}
Matthias Braun's avatar
Matthias Braun committed
487

488
489
490
491
492
493
494
495
496
/** Builds a RIP-relative lea computing the address of @p entity with
 * relocation kind @p kind (used for position-independent code). */
static ir_node *create_picaddr_lea(ir_node *const block,
                                   x86_immediate_kind_t const kind,
                                   ir_entity *const entity)
{
	amd64_addr_t addr = {
		.immediate = (x86_imm32_t) {
			.kind   = kind,
			.entity = entity,
		},
		.variant = X86_ADDR_RIP,
	};
	return new_bd_amd64_lea(NULL, block, 0, NULL, NULL, INSN_SIZE_64, addr);
}

Matthias Braun's avatar
Matthias Braun committed
502
/** Transforms a backend Relocation node: absolute addresses become a
 * 64-bit mov-immediate, PC-relative kinds become a RIP-relative lea. */
static ir_node *gen_be_Relocation(ir_node *const node)
{
	ir_node             *const block  = be_transform_nodes_block(node);
	ir_entity           *const entity = be_get_Relocation_entity(node);
	x86_immediate_kind_t const kind
		= (x86_immediate_kind_t)be_get_Relocation_kind(node);

	switch (kind) {
	case X86_IMM_ADDR: {
		amd64_imm64_t const imm = {
			.kind   = X86_IMM_ADDR,
			.entity = entity,
		};
		return new_bd_amd64_mov_imm(NULL, block, INSN_SIZE_64, &imm);
	}
	case X86_IMM_PCREL:
	case X86_IMM_GOTPCREL: /* can GOTPCREL happen here? */
		return create_picaddr_lea(block, kind, entity);
	default:
		break;
	}
	panic("Unexpected relocation kind");
}

526
527
/** Creates an IncSP node adjusting RSP by @p offset (with @p align).
 * Marked as flag-modifying because stack adjustment uses add/sub. */
ir_node *amd64_new_IncSP(ir_node *block, ir_node *old_sp, int offset,
                         unsigned align)
{
	ir_node *incsp = be_new_IncSP(&amd64_registers[REG_RSP], block, old_sp,
	                              offset, align);
	arch_add_irn_flags(incsp, arch_irn_flag_modify_flags);
	return incsp;
}

535
/** Constructor signature of binops taking an amd64_binop_addr_attr_t. */
typedef ir_node *(*construct_binop_func)(dbg_info *dbgi, ir_node *block, int arity, ir_node *const *in, arch_register_req_t const **in_reqs, amd64_binop_addr_attr_t const *attr_init);

/** Constructor signature of binops with an implicit rax operand
 * (used by gen_binop_rax). */
typedef ir_node *(*construct_rax_binop_func)(dbg_info *dbgi, ir_node *block, int arity, ir_node *const *in, arch_register_req_t const **in_reqs, amd64_insn_size_t size, amd64_op_mode_t op_mode, amd64_addr_t addr);

/** Flags steering operand matching in match_binop and friends. */
typedef enum match_flags_t {
	match_am           = 1 << 0, /**< may fold a source load (address mode) */
	match_mode_neutral = 1 << 1, /**< upper bits irrelevant; skip downconvs */
	match_immediate    = 1 << 2, /**< may encode operand 2 as 32-bit immediate */
	match_commutative  = 1 << 3, /**< operands may be swapped */
} match_flags_t;

/** Result of operand matching: everything needed to construct the node. */
typedef struct amd64_args_t {
	amd64_binop_addr_attr_t     attr;     /**< node attribute (op mode, addr, imm) */
	ir_node                    *mem_proj; /**< memory Proj of folded load, or NULL */
	ir_node                    *in[4];    /**< node inputs */
	int                         arity;    /**< number of used entries in in[] */
	const arch_register_req_t **reqs;     /**< input register requirements */
} amd64_args_t;

Matthias Braun's avatar
Matthias Braun committed
554
/** Tries to encode @p op as a 32-bit immediate, filling @p imm on
 * success.
 *
 * @param can_match_ip_relative  allow entity references (become PC-relative)
 * @param upper32_dont_care      accept negative values even for unsigned
 *                               modes (caller ignores the upper 32 bit)
 * @return true iff @p op was matched; @p imm is only valid then.
 */
static bool match_immediate_32(x86_imm32_t *imm, const ir_node *op,
                               bool can_match_ip_relative,
                               bool upper32_dont_care)
{
	assert(mode_needs_gp_reg(get_irn_mode(op)));
	/* caller must pass a zero-initialized immediate */
	assert(imm->offset == 0 && imm->entity == NULL
	       && imm->kind == X86_IMM_VALUE);

	ir_tarval *tv;
	ir_entity *entity;
	unsigned   reloc_kind;
	if (!be_match_immediate(op, &tv, &entity, &reloc_kind))
		return false;

	int32_t val;
	if (tv) {
		if (!tarval_is_long(tv))
			return false;
		/* value must survive the round-trip through int32_t */
		long lval = get_tarval_long(tv);
		val = (int32_t)lval;
		if ((long)val != lval)
			return false;
		/** the immediate value is signed extended to 64bit, sometimes
		 * this is not what we want. */
		if (!upper32_dont_care && val < 0
		    && !mode_is_signed(get_tarval_mode(tv)))
			return false;
	} else {
		val = 0;
	}

	x86_immediate_kind_t kind = (x86_immediate_kind_t)reloc_kind;
	if (entity != NULL) {
		if (!can_match_ip_relative)
			return false;
		/* plain addresses become PC-relative; other relocation kinds
		 * are only acceptable if already PC-relative (or PLT) */
		if (kind == X86_IMM_VALUE || kind == X86_IMM_ADDR) {
			kind = X86_IMM_PCREL;
		} else if (kind != X86_IMM_PCREL && kind != X86_IMM_PLT)
			return false;
	}

	imm->entity = entity;
	imm->offset = val;
	imm->kind   = kind;
	return true;
}

/* Height information used to detect dependency cycles when folding
 * loads into address modes (see input_depends_on_load). */
static ir_heights_t *heights;

603
604
605
606
607
608
609
610
611
/** Whether @p input (transitively) depends on @p load — folding the
 * load into a node using @p input would then create a cycle. */
static bool input_depends_on_load(ir_node *load, ir_node *input)
{
	/* if the dependency is in another block, then we ignore it for now
	   as we only match address mode loads in the same block. */
	if (get_nodes_block(input) != get_nodes_block(load))
		return false;
	return heights_reachable_in_block(heights, input, load);
}

Tobias Rapp's avatar
Tobias Rapp committed
612
613
614
615
616
617
618
619
620
/** When a load was folded into @p node, registers @p node as the
 * transformed result of that load so its memory Proj is rerouted.
 * No-op if @p mem_proj is NULL. */
static void fix_node_mem_proj(ir_node *node, ir_node *mem_proj)
{
	if (mem_proj != NULL) {
		ir_node *const folded_load = get_Proj_pred(mem_proj);
		be_set_transformed_node(folded_load, node);
	}
}

621
/** Checks whether @p node is the result Proj of a Load in @p block that
 * may be folded into the using instruction as a source address mode.
 * Returns the Load on success, NULL otherwise. */
static ir_node *source_am_possible(ir_node *block, ir_node *node)
{
	if (!is_Proj(node))
		return NULL;
	ir_node *load = get_Proj_pred(node);
	if (!is_Load(load))
		return NULL;
	assert(get_Proj_num(node) == pn_Load_res);
	/* only fold loads from the same block */
	if (get_nodes_block(load) != block)
		return NULL;
	/* make sure we are the only user */
	if (get_irn_n_edges(node) != 1)
		return NULL;
	/* From ia32_transform.c:751:
	 * in some edge cases with address mode we might reach the load normally
	 * and through some AM sequence, if it is already materialized then we
	 * can't create an AM node from it */
	if (be_is_transformed(node))
		return NULL;
	return load;
}

/** Whether @p op is narrower than 32 bit and its upper bits are not
 * already known to be clean, i.e. an explicit extension is required. */
static bool needs_extension(ir_node *op)
{
	ir_mode *const op_mode = get_irn_mode(op);
	return get_mode_size_bits(op_mode) < 32
	    && !be_upper_bits_clean(op, op_mode);
}

Matthias Braun's avatar
Matthias Braun committed
651
/** Builds an arithmetic-shift-right of @p value by the constant
 * @p immediate and returns its result Proj.  The result is constrained
 * to reuse the input register (two-address form). */
static ir_node *create_sar(dbg_info *dbgi, ir_node *const new_block,
                           amd64_insn_size_t size, ir_node *const value,
                           int32_t immediate)
{
	amd64_shift_attr_t attr;
	memset(&attr, 0, sizeof(attr));
	attr.base.op_mode = AMD64_OP_SHIFT_IMM;
	attr.size         = size;
	attr.immediate    = immediate;
	ir_node *in[1]    = { value };
	ir_node *const sar = new_bd_amd64_sar(dbgi, new_block, ARRAY_SIZE(in), in,
	                                      reg_reqs, &attr);
	arch_set_irn_register_req_out(sar, 0, &amd64_requirement_gp_same_0);
	return be_new_Proj(sar, pn_amd64_sar_res);
}

Tobias Rapp's avatar
Tobias Rapp committed
667
668
/** Decides whether one of @p op1/@p op2 can be folded as a load from
 * memory (source address mode).  On success sets @p out_load to the
 * Load to fold and @p out_op to the remaining register operand.
 * op2 is preferred; op1 is tried only for commutative operations. */
static bool use_address_matching(ir_mode *mode, match_flags_t flags,
                                 ir_node *block,
                                 ir_node *op1, ir_node *op2,
                                 ir_node **out_load, ir_node **out_op)
{
	if (! (flags & match_am))
		return false;

	/* 8/16-bit operations are not matched as address modes */
	unsigned mode_bits = get_mode_size_bits(mode);
	if (mode_bits == 8 || mode_bits == 16)
		return false;

	/* folding is only safe if the other operand does not depend on
	 * the load (would create a cycle) */
	ir_node *load2 = source_am_possible(block, op2);
	if (load2 != NULL && !input_depends_on_load(load2, op1)) {
		(*out_load) = load2;
		(*out_op)   = op1;
		return true;
	}

	if (flags & match_commutative) {
		ir_node *load1 = source_am_possible(block, op1);
		if (load1 != NULL && !input_depends_on_load(load1, op2)) {
			(*out_load) = load1;
			(*out_op)   = op2;
			return true;
		}
	}
	return false;
}

697
698
/** Decomposes address expression @p ptr into an amd64 address mode,
 * appending transformed base/index nodes to @p in and bumping @p arity
 * accordingly.  Fills @p addr with immediate, scale and variant. */
static void perform_address_matching(ir_node *ptr, int *arity,
                                     ir_node **in, amd64_addr_t *addr)
{
	x86_address_t maddr;
	memset(&maddr, 0, sizeof(maddr));
	x86_create_address_mode(&maddr, ptr, x86_create_am_normal);

	x86_addr_variant_t variant = maddr.variant;
	assert(variant != X86_ADDR_INVALID);
	if (x86_addr_variant_has_base(variant)) {
		int base_input   = (*arity)++;
		addr->base_input = base_input;
		in[base_input]   = be_transform_node(maddr.base);
	} else {
		assert(maddr.base == NULL);
	}
	if (x86_addr_variant_has_index(variant)) {
		int index_input   = (*arity)++;
		addr->index_input = index_input;
		in[index_input]   = be_transform_node(maddr.index);
	} else {
		assert(maddr.index == NULL);
	}
	/* the va_start pseudo-parameter must never appear in an address */
	ir_entity *entity = maddr.imm.entity;
	if (entity != NULL && is_parameter_entity(entity) &&
		get_entity_parameter_number(entity) == IR_VA_START_PARAMETER_NUMBER)
		panic("perform_address_matching: Request for invalid parameter (va_start parameter)");

	addr->immediate = maddr.imm;
	addr->log_scale = maddr.scale;
	addr->variant   = variant;
}

Matthias Braun's avatar
Matthias Braun committed
730
731
732
/** Matches the operands of a binary operation and fills @p args with
 * inputs, attribute and register requirements.  Three operand forms are
 * tried in order: reg+immediate, reg+memory (folded load), reg+reg. */
static void match_binop(amd64_args_t *args, ir_node *block,
                        ir_mode *mode, ir_node *op1, ir_node *op2,
                        match_flags_t flags)
{
	memset(args, 0, sizeof(*args));

	bool use_xmm       = mode_is_float(mode);
	bool use_immediate = flags & match_immediate;
	bool mode_neutral  = flags & match_mode_neutral;

	amd64_binop_addr_attr_t *const attr = &args->attr;
	attr->base.size = get_insn_size_from_mode(mode);

	/* TODO: legalize phase */
	if (mode_neutral) {
		/* upper bits don't matter: operate on the pre-downconv values */
		op1 = be_skip_downconv(op1, true);
		op2 = be_skip_downconv(op2, true);
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

	ir_node *load;
	ir_node *op;
	bool     use_am
		= use_address_matching(mode, flags, block, op1, op2, &load, &op);

	amd64_addr_t *addr = &attr->base.addr;
	if (use_immediate
	    && match_immediate_32(&attr->u.immediate, op2, false, mode_neutral)) {
		assert(!use_xmm && "Can't (yet) match binop with xmm immediate");
		/* fine, we found an immediate */
		int const reg_input = args->arity++;
		args->in[reg_input]     = be_transform_node(op1);
		addr->variant           = X86_ADDR_REG;
		addr->base_input        = reg_input;
		attr->base.base.op_mode = AMD64_OP_REG_IMM;
		args->reqs              = reg_reqs;
	} else if (use_am) {
		/* fold the load: register operand first, then address inputs */
		int const reg_input = args->arity++;
		attr->u.reg_input   = reg_input;
		args->in[reg_input] = be_transform_node(op);

		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &(args->arity), args->in, addr);

		/* pick the req table matching the register-input count so far */
		args->reqs = (use_xmm ? xmm_am_reqs : gp_am_reqs)[args->arity];

		ir_node *new_mem    = be_transform_node(get_Load_mem(load));
		int mem_input       = args->arity++;
		args->in[mem_input] = new_mem;
		addr->mem_input     = mem_input;

		/* remember the load's memory Proj for rerouting later */
		args->mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		attr->base.base.op_mode = AMD64_OP_REG_ADDR;
	} else {
		/* simply transform the arguments */
		int const reg_input0 = args->arity++;
		int const reg_input1 = args->arity++;
		args->in[reg_input0]    = be_transform_node(op1);
		args->in[reg_input1]    = be_transform_node(op2);
		addr->variant           = X86_ADDR_REG;
		addr->base_input        = reg_input0;
		attr->u.reg_input       = reg_input1;
		attr->base.base.op_mode = AMD64_OP_REG_REG;

		args->reqs = use_xmm ? amd64_xmm_xmm_reqs : amd64_reg_reg_reqs;
	}
}

static ir_node *gen_binop_am(ir_node *node, ir_node *op1, ir_node *op2,
801
802
                             construct_binop_func func, unsigned pn_res,
                             match_flags_t flags)
Matthias Braun's avatar
Matthias Braun committed
803
804
805
806
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(node);
	amd64_args_t args;
Matthias Braun's avatar
Matthias Braun committed
807
	match_binop(&args, block, mode, op1, op2, flags);
Matthias Braun's avatar
Matthias Braun committed
808
809
810

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
811
	ir_node  *const new_node  = func(dbgi, new_block, args.arity, args.in, args.reqs, &args.attr);
812

Tobias Rapp's avatar
Tobias Rapp committed
813
	fix_node_mem_proj(new_node, args.mem_proj);
Matthias Braun's avatar
Matthias Braun committed
814

815
816
817
818
819
820
821
	if (mode_is_float(mode)) {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_xmm_same_0);
	} else {
		arch_set_irn_register_req_out(new_node, 0,
		                              &amd64_requirement_gp_same_0);
	}
822
	return be_new_Proj(new_node, pn_res);
Matthias Braun's avatar
Matthias Braun committed
823
824
}

825
static ir_node *gen_binop_rax(ir_node *node, ir_node *op0, ir_node *op1,
826
827
828
                              construct_rax_binop_func make_node,
                              match_flags_t flags)
{
Matthias Braun's avatar
Matthias Braun committed
829
	bool mode_neutral = flags & match_mode_neutral;
830
831
	assert(! (flags & match_immediate));

832
	ir_mode          *mode = get_irn_mode(op0);
833
	amd64_insn_size_t size = get_insn_size_from_mode(mode);
834
835
836

	/* TODO: legalize phase */
	if (mode_neutral) {
837
		op0 = be_skip_downconv(op0, true);
838
		op1 = be_skip_downconv(op1, true);
839
840
841
842
843
	} else {
		/* TODO: extend inputs? */
		(void)needs_extension;
	}

Matthias Braun's avatar
Matthias Braun committed
844
	ir_node *block = get_nodes_block(node);
845
846
	ir_node *load;
	ir_node *op;
Matthias Braun's avatar
Matthias Braun committed
847
	bool     use_am
848
		= use_address_matching(mode, flags, block, op0, op1, &load, &op);
Matthias Braun's avatar
Matthias Braun committed
849

850
851
852
853
854
855
	ir_node *in[4];
	int      arity = 0;
	amd64_op_mode_t op_mode;
	amd64_addr_t    addr;
	memset(&addr, 0, sizeof(addr));

Matthias Braun's avatar
Matthias Braun committed
856
	const arch_register_req_t **reqs;
857
	ir_node *mem_proj = NULL;
858
859
860
861
862
	if (use_am) {
		ir_node *new_op    = be_transform_node(op);
		int      reg_input = arity++;
		in[reg_input]      = new_op;

863
864
865
		ir_node *ptr = get_Load_ptr(load);
		perform_address_matching(ptr, &arity, in, &addr);

866
		reqs = gp_am_reqs[arity];
867
868
869
870
871

		ir_node *new_mem = be_transform_node(get_Load_mem(load));
		int mem_input    = arity++;
		in[mem_input]    = new_mem;
		addr.mem_input   = mem_input;
872

873
		mem_proj                = get_Proj_for_pn(load, pn_Load_M);
874
		op_mode                 = AMD64_OP_ADDR;
875
876
	} else {
		/* simply transform the arguments */
877
878
879
880
881
882
883
884
		int const input0 = arity++;
		int const input1 = arity++;
		in[input0]      = be_transform_node(op0);
		in[input1]      = be_transform_node(op1);
		reqs            = reg_rax_reqs;
		op_mode         = AMD64_OP_REG;
		addr.variant    = X86_ADDR_REG;
		addr.base_input = input0;
885
886
	}

887
	assert((size_t)arity <= ARRAY_SIZE(in));
888
889
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
890
	ir_node  *const new_node  = make_node(dbgi, new_block, arity, in, reqs, size, op_mode, addr);
891
892
893
	if (mem_proj != NULL) {
		be_set_transformed_node(load, new_node);
	}
894
895
896
	return new_node;
}

897
898
899
900
901
902
903
904
/**
 * Transform a binary SSE (xmm) operation, optionally folding a Load into an
 * address-mode operand.
 *
 * @param node      the Firm node being transformed
 * @param op0       first operand (tied to the result register)
 * @param op1       second operand (candidate for address-mode matching)
 * @param make_node backend constructor for the xmm instruction
 * @param flags     matching flags
 * @return the result Proj of the newly constructed instruction
 */
static ir_node *gen_binop_xmm(ir_node *node, ir_node *op0, ir_node *op1,
                              construct_binop_func make_node,
                              match_flags_t flags)
{
	ir_node *block = get_nodes_block(node);
	ir_mode *mode  = get_irn_mode(op0);
	amd64_args_t args;
	memset(&args, 0, sizeof(args));
	amd64_binop_addr_attr_t *const attr = &args.attr;
	attr->base.size = INSN_SIZE_64;

	ir_node *load;
	ir_node *op;
	bool use_am = use_address_matching(mode, flags, block, op0, op1, &load,
	                                   &op);

	amd64_addr_t *addr = &attr->base.addr;
	if (use_am) {
		/* fold the matched Load into an address-mode operand */
		int reg_input = args.arity++;
		attr->u.reg_input  = reg_input;
		args.in[reg_input] = be_transform_node(op);

		ir_node      *ptr  = get_Load_ptr(load);
		perform_address_matching(ptr, &args.arity, args.in, addr);

		/* requirements are indexed by the arity before the mem input */
		args.reqs = xmm_am_reqs[args.arity];

		ir_node *new_mem   = be_transform_node(get_Load_mem(load));
		int mem_input      = args.arity++;
		args.in[mem_input] = new_mem;
		addr->mem_input    = mem_input;

		args.mem_proj      = get_Proj_for_pn(load, pn_Load_M);
		attr->base.base.op_mode = AMD64_OP_REG_ADDR;
	} else {
		/* simply transform both arguments into registers */
		int const input0 = args.arity++;
		int const input1 = args.arity++;
		args.in[input0]         = be_transform_node(op0);
		args.in[input1]         = be_transform_node(op1);
		addr->base_input        = input0;
		addr->variant           = X86_ADDR_REG;
		attr->u.reg_input       = input1;
		attr->base.base.op_mode = AMD64_OP_REG_REG;
		args.reqs               = amd64_xmm_xmm_reqs;
	}

	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(block);
	ir_node  *const new_node  = make_node(dbgi, new_block, args.arity, args.in, args.reqs, &args.attr);

	/* if a Load was folded in, reroute its memory Proj to the new node */
	fix_node_mem_proj(new_node, args.mem_proj);

	/* SSE two-address form: result register must equal the first input */
	arch_set_irn_register_req_out(new_node, 0,
	                              &amd64_requirement_xmm_same_0);
	/* NOTE(review): pn_amd64_subs_res is used for all xmm binops here —
	 * presumably every generated xmm binop shares the same proj layout;
	 * confirm against the node specification. */
	return be_new_Proj(new_node, pn_amd64_subs_res);
}

954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
/** Constructor signature for two-operand x87 floating-point instructions. */
typedef ir_node *(*construct_x87_binop_func)(
		dbg_info *dbgi, ir_node *block, ir_node *op0, ir_node *op1);

/**
 * Transform a binary x87 floating-point operation by transforming both
 * operands and invoking the given instruction constructor.
 *
 * @param node  the Firm node being transformed (supplies block/debug info)
 * @param op0   first operand
 * @param op1   second operand
 * @param cons  backend constructor for the concrete x87 instruction
 * @return the newly constructed x87 instruction node
 */
static ir_node *gen_binop_x87(ir_node *const node, ir_node *const op0,
                              ir_node *const op1, construct_x87_binop_func cons)
{
	dbg_info *const dbgi      = get_irn_dbg_info(node);
	ir_node  *const new_block = be_transform_node(get_nodes_block(node));
	ir_node  *const new_op0   = be_transform_node(op0);
	ir_node  *const new_op1   = be_transform_node(op1);
	ir_node  *const res       = cons(dbgi, new_block, new_op0, new_op1);
	/* TODO: address modes */
	return res;
}

969
typedef ir_node *(*construct_shift_func)(dbg_info *dbgi, ir_node *block, int arity, ir_node *const *in, arch_register_req_t const **in_reqs, amd64_shift_attr_t const *attr_init);
Matthias Braun's avatar
Matthias Braun committed
970
971

static ir_node *gen_shift_binop(ir_node *node, ir_node *op1, ir_node *op2,
972
973
                                construct_shift_func func, unsigned