tree-optimization/113831 - wrong VN with structurally identical ref

When we use get_ref_base_and_extent during VN and it ends up using
global ranges to restrict the range of a ref, we have to take care
not to use the same expression in the hashtable as for a ref that
could not use that global range.  The following attempts to ensure
this by applying logic similar to get_ref_base_and_extent's to
copy_reference_ops_from_ref so the two behave consistently.
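
As a minimal illustration (this is essentially the new
gcc.dg/torture/pr113831.c testcase added below, annotated): the two a[i]
loads are structurally identical to value numbering, but only the one in
the else branch can be narrowed to a[2] via the global range of its index,
so the two must not share a hashtable entry.

int a[3];
int __attribute__((noipa))
foo (int i, int x)
{
  int tem = 0;
  a[2] = x;
  if (i < 1)
    ++i;
  else
    {
      ++i;        /* Here i >= 2, so with int a[3] this load can only be
                     a[2], which equals x.  */
      tem = a[i];
    }
  tem += a[i];    /* Here i may also be 1; this load must not reuse the
                     value-number entry recorded for the load above.  */
  return tem;
}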

	PR tree-optimization/113831
	PR tree-optimization/108355
	* tree-ssa-sccvn.cc (copy_reference_ops_from_ref): When
	we see variable array indices and get_ref_base_and_extent
	can resolve those to constants, fix up the ops to constants
	as well (see the arithmetic sketch below).
	(ao_ref_init_from_vn_reference): Use 'off' member for
	ARRAY_REF and ARRAY_RANGE_REF instead of recomputing it.
	(valueize_refs_1): Also fix up 'off' of ARRAY_RANGE_REF.

	* gcc.dg/torture/pr113831.c: New testcase.
	* gcc.dg/tree-ssa/ssa-fre-104.c: Likewise.
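
A standalone sketch of the index-fixup arithmetic the patch adds to
copy_reference_ops_from_ref.  The helper name and plain integer types are
hypothetical, not GCC internals; the real code operates on vn_reference_op_s
ops with wide-int arithmetic.

#include <stdio.h>

/* Given the constant bit offset get_ref_base_and_extent computed for the
   whole ref, the byte offset already accounted for by inner components,
   the element size and the array's lower bound, recover the constant index
   that replaces the variable one and the byte offset recorded in the op.  */
static void
fixup_array_index (long coffset_bits, long inner_off_bytes,
                   unsigned long elsz_bytes, long low_bound)
{
  unsigned long idx = (coffset_bits / 8 - inner_off_bytes) / elsz_bytes;
  long new_index = low_bound + idx;           /* replaces the SSA-name op0 */
  unsigned long new_off = idx * elsz_bytes;   /* recorded in op.off */
  printf ("index %ld, off %lu\n", new_index, new_off);
}

int
main (void)
{
  /* a[2] in 'int a[3]': bit offset 64, 4-byte elements, lower bound 0.  */
  fixup_array_index (64, 0, 4, 0);   /* prints "index 2, off 8" */
  return 0;
}

With the index folded to a constant this way, the constrained a[i] is
recorded as a[2] and can no longer collide with an unconstrained a[i].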
Author: Richard Biener
Date:   2024-02-09 10:16:38 +01:00
Commit: 938a419182 (parent 0437cbdccb)
3 changed files with 139 additions and 14 deletions

gcc/testsuite/gcc.dg/torture/pr113831.c

@@ -0,0 +1,26 @@
/* { dg-do run } */
int a[3];
int __attribute__((noipa))
foo(int i, int x)
{
int tem = 0;
a[2] = x;
if (i < 1)
++i;
else
{
++i;
tem = a[i];
}
tem += a[i];
return tem;
}
int
main()
{
if (foo (0, 7) != 0)
__builtin_abort();
return 0;
}

gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-104.c

@@ -0,0 +1,24 @@
/* { dg-do compile } */
/* { dg-options "-O2 -fdump-tree-fre1" } */
int a;
int *b = &a;
int **c = &b;
int d;
void bar25_(void);
void foo(void);
int main() {
int e[][1] = {0, 0, 0, 0, 0, 1};
for (;;) {
bar25_();
/* We should optimistically treat a == 0 because of the bounds of
the object. */
if (e[5][a])
break;
e[a][0] = 0;
foo();
}
*c = &d;
}
/* { dg-final { scan-tree-dump-not "foo" "fre1" } } */

gcc/tree-ssa-sccvn.cc

@@ -898,6 +898,8 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
/* For non-calls, store the information that makes up the address. */
tree orig = ref;
unsigned start = result->length ();
bool seen_variable_array_ref = false;
while (ref)
{
vn_reference_op_s temp;
@@ -984,6 +986,12 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
/* Record index as operand. */
temp.op0 = TREE_OPERAND (ref, 1);
/* When the index is not constant we have to apply the same
logic as get_ref_base_and_extent which eventually uses
global ranges to refine the overall ref extent. Record
we've seen such a case, fixup below. */
if (TREE_CODE (temp.op0) == SSA_NAME)
seen_variable_array_ref = true;
/* Always record lower bounds and element size. */
temp.op1 = array_ref_low_bound (ref);
/* But record element size in units of the type alignment. */
@@ -1076,6 +1084,82 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
else
ref = NULL_TREE;
}
poly_int64 offset, size, max_size;
tree base;
bool rev;
if (seen_variable_array_ref
&& handled_component_p (orig)
&& (base = get_ref_base_and_extent (orig,
&offset, &size, &max_size, &rev))
&& known_size_p (max_size)
&& known_eq (size, max_size))
{
poly_int64 orig_offset = offset;
poly_int64 tem;
if (TREE_CODE (base) == MEM_REF
&& mem_ref_offset (base).to_shwi (&tem))
offset += tem * BITS_PER_UNIT;
HOST_WIDE_INT coffset = offset.to_constant ();
/* When get_ref_base_and_extent computes an offset constrained to
a constant position we have to fixup variable array indexes in
the ref to avoid the situation where based on context we'd have
to value-number the same vn_reference ops differently. Make
the vn_reference ops differ by adjusting those indexes to
appropriate constants. */
poly_int64 off = 0;
for (unsigned i = result->length (); i > start; --i)
{
auto &op = (*result)[i-1];
if ((op.opcode == ARRAY_REF
|| op.opcode == ARRAY_RANGE_REF)
&& TREE_CODE (op.op0) == SSA_NAME)
{
/* There's a single constant index that gets 'off' closer
to 'offset'. */
unsigned HOST_WIDE_INT elsz
= tree_to_uhwi (op.op2) * vn_ref_op_align_unit (&op);
unsigned HOST_WIDE_INT idx
= (coffset / BITS_PER_UNIT - off.to_constant ()) / elsz;
if (idx == 0)
op.op0 = op.op1;
else
op.op0 = wide_int_to_tree (TREE_TYPE (op.op0),
wi::to_poly_wide (op.op1) + idx);
op.off = idx * elsz;
}
else
{
if (op.opcode == ERROR_MARK)
/* two-ops codes have the offset in the first op. */
;
else if (op.opcode == ADDR_EXPR
|| op.opcode == SSA_NAME
|| op.opcode == CONSTRUCTOR
|| TREE_CODE_CLASS (op.opcode) == tcc_declaration
|| TREE_CODE_CLASS (op.opcode) == tcc_constant)
/* end-of ref. */
gcc_assert (i == result->length ());
else
{
gcc_assert (known_ne (op.off, -1));
off += op.off;
}
}
}
if (flag_checking)
{
ao_ref r;
if (start != 0)
;
else if (ao_ref_init_from_vn_reference (&r, 0, 0, TREE_TYPE (orig),
*result))
gcc_assert (known_eq (r.offset, orig_offset)
&& known_eq (r.size, size)
&& known_eq (r.max_size, max_size));
else
gcc_unreachable ();
}
}
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
@@ -1203,21 +1287,11 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
case ARRAY_RANGE_REF:
case ARRAY_REF:
/* We recorded the lower bound and the element size. */
if (!poly_int_tree_p (op->op0)
|| !poly_int_tree_p (op->op1)
|| TREE_CODE (op->op2) != INTEGER_CST)
/* Use the recorded constant offset. */
if (maybe_eq (op->off, -1))
max_size = -1;
else
{
poly_offset_int woffset
= wi::sext (wi::to_poly_offset (op->op0)
- wi::to_poly_offset (op->op1),
TYPE_PRECISION (sizetype));
woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
woffset <<= LOG2_BITS_PER_UNIT;
offset += woffset;
}
offset += op->off << LOG2_BITS_PER_UNIT;
break;
case REALPART_EXPR:
@@ -1724,7 +1798,8 @@ re_valueize:
}
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
else if (vro->opcode == ARRAY_REF
else if ((vro->opcode == ARRAY_REF
|| vro->opcode == ARRAY_RANGE_REF)
&& known_eq (vro->off, -1)
&& poly_int_tree_p (vro->op0)
&& poly_int_tree_p (vro->op1)