diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index d31aec79a0a..f2fe41f7a9f 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2010-07-28  Richard Guenther
+
+	PR middle-end/44903
+	* builtins.c (fold_builtin_memory_op): On STRICT_ALIGNMENT
+	targets try harder to not generate unaligned accesses.
+
 2010-07-28  Maxim Kuvyrkov
 
 	PR rtl-optimization/45101
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 9b6fb1033b1..b20426cd784 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -8474,7 +8474,10 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
 	  STRIP_NOPS (srcvar);
 	  if (TREE_CODE (srcvar) == ADDR_EXPR
 	      && var_decl_component_p (TREE_OPERAND (srcvar, 0))
-	      && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+	      && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
+	      && (!STRICT_ALIGNMENT
+		  || !destvar
+		  || src_align >= (int) TYPE_ALIGN (desttype)))
 	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
 				  srcvar, off0);
 	  else
@@ -8485,11 +8488,17 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
 
       if (srcvar == NULL_TREE)
 	{
+	  if (STRICT_ALIGNMENT
+	      && src_align < (int) TYPE_ALIGN (desttype))
+	    return NULL_TREE;
 	  STRIP_NOPS (src);
 	  srcvar = fold_build2 (MEM_REF, desttype, src, off0);
 	}
       else if (destvar == NULL_TREE)
 	{
+	  if (STRICT_ALIGNMENT
+	      && dest_align < (int) TYPE_ALIGN (srctype))
+	    return NULL_TREE;
 	  STRIP_NOPS (dest);
 	  destvar = fold_build2 (MEM_REF, srctype, dest, off0);
 	}