Commit 25b6a365 authored by yb9976

Slightly simplify some fallthrough handling code

parent 1f846213
@@ -740,11 +740,10 @@ static void emit_amd64_jcc(const ir_node *irn)
 	amd64_emitf(proj_true, "j%PX %L", (int)cc);
 	ir_node const *const false_target = be_emit_get_cfop_target(proj_false);
-	if (be_emit_get_prev_block(false_target) == block) {
-		if (be_options.verbose_asm)
-			amd64_emitf(proj_false, "/* fallthrough to %L */");
-	} else {
+	if (be_emit_get_prev_block(false_target) != block) {
 		amd64_emitf(proj_false, "jmp %L");
+	} else if (be_options.verbose_asm) {
+		amd64_emitf(proj_false, "/* fallthrough to %L */");
 	}
 }
@@ -466,12 +466,10 @@ static void emit_arm_B(const ir_node *irn)
 	arm_emitf(irn, "b%s %t", suffix, proj_true);
 	ir_node const *const false_target = be_emit_get_cfop_target(proj_false);
-	if (be_emit_get_prev_block(false_target) == block) {
-		if (be_options.verbose_asm) {
-			arm_emitf(irn, "/* fallthrough to %t */", proj_false);
-		}
-	} else {
+	if (be_emit_get_prev_block(false_target) != block) {
 		arm_emitf(irn, "b %t", proj_false);
+	} else if (be_options.verbose_asm) {
+		arm_emitf(irn, "/* fallthrough to %t */", proj_false);
 	}
 }
@@ -778,11 +778,10 @@ static void emit_ia32_Jcc(const ir_node *node)
 	}
 	/* the second Proj might be a fallthrough */
-	if (fallthrough) {
-		if (be_options.verbose_asm)
-			ia32_emitf(proj_false, "/* fallthrough to %L */");
-	} else {
+	if (!fallthrough) {
 		ia32_emitf(proj_false, "jmp %L");
+	} else if (be_options.verbose_asm) {
+		ia32_emitf(proj_false, "/* fallthrough to %L */");
 	}
 }
@@ -852,11 +851,10 @@ static void emit_ia32_Jmp(const ir_node *node)
 	/* we have a block schedule */
 	ir_node *block  = get_nodes_block(node);
 	ir_node *target = be_emit_get_cfop_target(node);
-	if (fallthrough_possible(block, target)) {
-		if (be_options.verbose_asm)
-			ia32_emitf(node, "/* fallthrough to %L */");
-	} else {
+	if (!fallthrough_possible(block, target)) {
 		ia32_emitf(node, "jmp %L");
+	} else if (be_options.verbose_asm) {
+		ia32_emitf(node, "/* fallthrough to %L */");
 	}
 }
@@ -1186,16 +1186,14 @@ static void emit_sparc_branch(const ir_node *node, get_cc_func get_cc)
 	const ir_node *block       = get_nodes_block(node);
 	const ir_node *proj_target = be_emit_get_cfop_target(proj_false);
-	if (be_emit_get_prev_block(proj_target) == block) {
-		if (be_options.verbose_asm) {
-			sparc_emitf(node, "/* fallthrough to %L */", proj_false);
-		}
-	} else {
+	if (be_emit_get_prev_block(proj_target) != block) {
 		sparc_emitf(node, "ba %L", proj_false);
 		/* TODO: fill this slot as well */
 		emitting_delay_slot = true;
 		sparc_emitf(NULL, "nop");
 		emitting_delay_slot = false;
+	} else if (be_options.verbose_asm) {
+		sparc_emitf(node, "/* fallthrough to %L */", proj_false);
 	}
 }
@@ -1226,13 +1224,11 @@ static void emit_sparc_fbfcc(const ir_node *node)
 static void emit_sparc_Ba(const ir_node *node)
 {
-	if (ba_is_fallthrough(node)) {
-		if (be_options.verbose_asm) {
-			sparc_emitf(node, "/* fallthrough to %L */", node);
-		}
-	} else {
+	if (!ba_is_fallthrough(node)) {
 		sparc_emitf(node, "ba %L", node);
 		fill_delay_slot(node);
+	} else if (be_options.verbose_asm) {
+		sparc_emitf(node, "/* fallthrough to %L */", node);
 	}
 }
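Every hunk applies the same mechanical transformation: instead of testing for the fallthrough case first and nesting the verbose-asm comment inside it, the condition is inverted so the unconditional jump is emitted in the first branch and the comment collapses into an else-if, removing one level of nesting. The following is a minimal standalone sketch of that pattern, not the libfirm code; emit() and verbose_asm are hypothetical stand-ins for the per-backend *_emitf helpers and be_options.verbose_asm.

#include <stdbool.h>
#include <stdio.h>

static bool verbose_asm = true;

static void emit(const char *line)
{
	puts(line);
}

/* Before: the common action (emitting the jump) sits in the else branch,
 * and the verbose-only comment needs an extra nested if. */
static void emit_branch_old(bool is_fallthrough)
{
	if (is_fallthrough) {
		if (verbose_asm)
			emit("/* fallthrough */");
	} else {
		emit("jmp target");
	}
}

/* After: invert the condition so the jump comes first and the
 * verbose-only comment becomes a flat else-if. */
static void emit_branch_new(bool is_fallthrough)
{
	if (!is_fallthrough) {
		emit("jmp target");
	} else if (verbose_asm) {
		emit("/* fallthrough */");
	}
}

int main(void)
{
	/* Both variants print the same output for both cases. */
	emit_branch_old(true);
	emit_branch_new(true);
	emit_branch_old(false);
	emit_branch_new(false);
	return 0;
}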