diff --git a/gcc/poly-int.h b/gcc/poly-int.h
index 2bf9d98599f..d085544a57e 100644
--- a/gcc/poly-int.h
+++ b/gcc/poly-int.h
@@ -1178,6 +1178,19 @@ lshift (const poly_int_pod<N, Ca> &a, const Cb &b)
     }
 }
 
+/* Poly version of sext_hwi, with the same interface.  */
+
+template<unsigned int N, typename C>
+inline poly_int<N, HOST_WIDE_INT>
+sext_hwi (const poly_int<N, C> &a, unsigned int precision)
+{
+  poly_int_pod<N, HOST_WIDE_INT> r;
+  for (unsigned int i = 0; i < N; i++)
+    r.coeffs[i] = sext_hwi (a.coeffs[i], precision);
+  return r;
+}
+
+
 /* Return true if a0 + a1 * x might equal b0 + b1 * x for some nonnegative
    integer x.  */
 
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index 76587632312..3732d06b0bb 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -3684,7 +3684,12 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
 	    break;
 	  off += vro->off;
 	}
-      if (i == operands.length () - 1)
+      if (i == operands.length () - 1
+	  /* Make sure the offset we accumulated in a 64bit int
+	     fits the address computation carried out in target
+	     offset precision.  */
+	  && (off.coeffs[0]
+	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
 	{
 	  gcc_assert (operands[i-1].opcode == MEM_REF);
 	  tree ops[2];
@@ -3808,7 +3813,12 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
 	    break;
 	  off += vro->off;
 	}
-      if (i == operands.length () - 1)
+      if (i == operands.length () - 1
+	  /* Make sure the offset we accumulated in a 64bit int
+	     fits the address computation carried out in target
+	     offset precision.  */
+	  && (off.coeffs[0]
+	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
 	{
 	  gcc_assert (operands[i-1].opcode == MEM_REF);
 	  tree ops[2];