Commit 132dc691 authored by Michael Beck

preliminary code for unaligned Load/Store lowering added

[r15438]
parent a420be62
@@ -506,6 +506,20 @@ static void lower_bitfields_stores(ir_node *store) {
	set_Store_ptr(store, ptr);
} /* lower_bitfields_stores */

/**
 * Lowers unaligned Loads.
 */
static void lower_unaligned_Load(ir_node *load) {
	/* NYI */
}

/**
 * Lowers unaligned Stores.
 */
static void lower_unaligned_Store(ir_node *store) {
	/* NYI */
}

/**
 * lowers IR-nodes, called from walker
 */
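The two functions above are still empty stubs ("NYI"). When implemented, they will have to replace a misaligned memory access with a sequence of smaller, always-aligned accesses combined through shifts and ors. As a minimal sketch of that byte-wise pattern, the plain C below shows what such a lowering would compute for a 32-bit load on a little-endian target; it is illustration only and does not use libfirm's node constructors, which the real lowering would have to emit as IR.

#include <stdint.h>

/* Illustration only: the computation an implemented lower_unaligned_Load
 * would have to express as IR for a 32-bit load on a little-endian target.
 * Each byte access is trivially aligned; shifts and ors reassemble the word. */
static uint32_t load32_unaligned(const void *p) {
	const uint8_t *b = (const uint8_t *)p;
	return  (uint32_t)b[0]
	      | ((uint32_t)b[1] << 8)
	      | ((uint32_t)b[2] << 16)
	      | ((uint32_t)b[3] << 24);
}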
@@ -518,6 +532,14 @@ static void lower_irnode(ir_node *irn, void *env) {
	case iro_SymConst:
		lower_symconst(irn);
		break;
	case iro_Load:
		if (env != NULL && get_Load_align(irn) == align_non_aligned)
			lower_unaligned_Load(irn);
		break;
	case iro_Store:
		if (env != NULL && get_Store_align(irn) == align_non_aligned)
			lower_unaligned_Store(irn);
		break;
	default:
		break;
	}
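The dispatch above calls the new stubs only when the walker is given a non-NULL environment and the node is flagged align_non_aligned, so the path stays inactive for now. For symmetry with the load sketch earlier, the plain C below illustrates the byte-wise pattern a finished lower_unaligned_Store would have to produce for a 32-bit store on a little-endian target; again, this is illustration only, not the IR construction itself.

#include <stdint.h>

/* Illustration only: the byte-wise pattern an implemented lower_unaligned_Store
 * would have to emit as IR for a 32-bit store on a little-endian target.
 * Each single-byte store is trivially aligned. */
static void store32_unaligned(void *p, uint32_t val) {
	uint8_t *b = (uint8_t *)p;
	b[0] = (uint8_t) val;
	b[1] = (uint8_t)(val >> 8);
	b[2] = (uint8_t)(val >> 16);
	b[3] = (uint8_t)(val >> 24);
}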
@@ -563,7 +585,7 @@ void lower_highlevel(void) {
	/* First step: lower bitfield access: must be run as long as Sels still exists. */
	irg_walk_graph(irg, NULL, lower_bf_access, NULL);

	/* Finally: lower SymConst-Size and Sel nodes. */
	/* Finally: lower SymConst-Size and Sel nodes, unaligned Load/Stores. */
	irg_walk_graph(irg, NULL, lower_irnode, NULL);
	set_irg_phase_low(irg);