Avoid random stmt order result in pass_waccess::use_after_inval_p

use_after_inval_p uses stmt UIDs to speed up repeated dominance
checks within a basic block, but it fails to assign UIDs to PHIs,
which means comparisons involving PHIs in the same block give a
random result.
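
For context, the pre-patch numbering loop in use_after_inval_p
(reproduced from the hunk further below) starts at gsi_start_bb and
therefore never visits the PHI nodes at the top of the block, so the
final UID comparison is unreliable whenever a PHI is involved:

  /* Pre-patch loop: only non-PHI statements receive a fresh UID ...  */
  for (auto si = gsi_start_bb (inval_bb); !gsi_end_p (si);
       gsi_next_nondebug (&si))
    {
      gimple *stmt = gsi_stmt (si);
      unsigned uid = inc_gimple_stmt_max_uid (m_func);
      gimple_set_uid (stmt, uid);
    }

  /* ... so if INVAL_STMT or USE_STMT is a PHI, gimple_uid () still holds
     whatever an earlier pass left there and this compare is unreliable.  */
  return gimple_uid (inval_stmt) < gimple_uid (use_stmt);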

The following refactors renumber_gimple_stmt_uids to expose a new
renumber_gimple_stmt_uids_in_block helper that we can share.
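
As a hedged sketch of how a caller is expected to use the new helper
(the wrapper function and the 'numbered' bitmap below are illustrative
only and not part of the patch; the fragment assumes GCC's internal
headers, i.e. gimple.h, gimple-iterator.h, bitmap.h and tree-dfa.h):

  /* Sketch only: number each block the first time it is queried, then
     order two stmts in the same block by UID.  */
  static bool
  stmt_precedes_p_in_bb (function *fun, bitmap numbered, gimple *a, gimple *b)
  {
    basic_block bb = gimple_bb (a);
    gcc_assert (bb == gimple_bb (b));
    if (bitmap_set_bit (numbered, bb->index))
      /* First query for BB: assign UIDs to its PHIs and statements.  */
      renumber_gimple_stmt_uids_in_block (fun, bb);
    return gimple_uid (a) < gimple_uid (b);
  }

As the new helper's comment notes, set_gimple_stmt_max_uid (fun, 0) is
assumed to have been called once before the first query.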

But since we rely on processing PHIs to follow pointer adjustments
even when they appear before the invalidating statement, the patch
also moves the PHI handling in check_pointer_uses out of the
use_after_inval_p guard.  This then also fixes PR109141.
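
Schematically, the reordered loop body in check_pointer_uses then has
the shape below (a simplified restatement of the second
gimple-ssa-warn-access.cc hunk, with the warning details elided):

  if (gimple_code (use_stmt) == GIMPLE_PHI)
    {
      /* Handle the PHI before the dominance test so that a PHI that
         appears before the invalidating statement still feeds derived
         pointers into the worklist.  */
      tree lhs = gimple_phi_result (use_stmt);
      if (!pointers_related_p (stmt, lhs, ptr, m_ptr_qry))
        continue;
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          pointers.safe_push (lhs);
          continue;
        }
    }

  if (use_after_inval_p (stmt, use_stmt, check_dangling))
    {
      /* USE_STMT is reached after the invalidation: diagnose it
         (warning code elided).  */
    }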

	PR tree-optimization/109141
	* tree-dfa.h (renumber_gimple_stmt_uids_in_block): New.
	* tree-dfa.cc (renumber_gimple_stmt_uids_in_block): Split
	out from ...
	(renumber_gimple_stmt_uids): ... here and
	(renumber_gimple_stmt_uids_in_blocks): ... here.
	* gimple-ssa-warn-access.cc (pass_waccess::use_after_inval_p):
	Use renumber_gimple_stmt_uids_in_block to also assign UIDs
	to PHIs.
	(pass_waccess::check_pointer_uses): Process all PHIs.

Author: Richard Biener
Date:   2023-03-15 11:41:20 +01:00
Commit: adb70c2d10 (parent af4f681666)
3 changed files with 37 additions and 49 deletions

--- a/gcc/gimple-ssa-warn-access.cc
+++ b/gcc/gimple-ssa-warn-access.cc
@@ -3862,13 +3862,7 @@ pass_waccess::use_after_inval_p (gimple *inval_stmt, gimple *use_stmt,
      to consecutive statements in it.  Use the ids to determine which
      precedes which.  This avoids the linear traversal on subsequent
      visits to the same block.  */
-  for (auto si = gsi_start_bb (inval_bb); !gsi_end_p (si);
-       gsi_next_nondebug (&si))
-    {
-      gimple *stmt = gsi_stmt (si);
-      unsigned uid = inc_gimple_stmt_max_uid (m_func);
-      gimple_set_uid (stmt, uid);
-    }
+  renumber_gimple_stmt_uids_in_block (m_func, inval_bb);
 
   return gimple_uid (inval_stmt) < gimple_uid (use_stmt);
 }
@@ -4239,27 +4233,26 @@ pass_waccess::check_pointer_uses (gimple *stmt, tree ptr,
           tree_code code = gimple_cond_code (cond);
           equality = code == EQ_EXPR || code == NE_EXPR;
         }
+      else if (gimple_code (use_stmt) == GIMPLE_PHI)
+        {
+          /* Only add a PHI result to POINTERS if all its
+             operands are related to PTR, otherwise continue.  */
+          tree lhs = gimple_phi_result (use_stmt);
+          if (!pointers_related_p (stmt, lhs, ptr, m_ptr_qry))
+            continue;
+
+          if (TREE_CODE (lhs) == SSA_NAME)
+            {
+              pointers.safe_push (lhs);
+              continue;
+            }
+        }
 
       /* Warn if USE_STMT is dominated by the deallocation STMT.
          Otherwise, add the pointer to POINTERS so that the uses
          of any other pointers derived from it can be checked.  */
       if (use_after_inval_p (stmt, use_stmt, check_dangling))
         {
-          if (gimple_code (use_stmt) == GIMPLE_PHI)
-            {
-              /* Only add a PHI result to POINTERS if all its
-                 operands are related to PTR, otherwise continue.  */
-              tree lhs = gimple_phi_result (use_stmt);
-              if (!pointers_related_p (stmt, lhs, ptr, m_ptr_qry))
-                continue;
-
-              if (TREE_CODE (lhs) == SSA_NAME)
-                {
-                  pointers.safe_push (lhs);
-                  continue;
-                }
-            }
-
           basic_block use_bb = gimple_bb (use_stmt);
           bool this_maybe
             = (maybe

--- a/gcc/tree-dfa.cc
+++ b/gcc/tree-dfa.cc

@@ -59,6 +59,25 @@ static void collect_dfa_stats (struct dfa_stats_d *);
                         Dataflow analysis (DFA) routines
 ---------------------------------------------------------------------------*/
 
+/* Renumber the gimple stmt uids in one block.  The caller is responsible
+   of calling set_gimple_stmt_max_uid (fun, 0) at some point.  */
+
+void
+renumber_gimple_stmt_uids_in_block (struct function *fun, basic_block bb)
+{
+  gimple_stmt_iterator bsi;
+  for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+    {
+      gimple *stmt = gsi_stmt (bsi);
+      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
+    }
+  for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+    {
+      gimple *stmt = gsi_stmt (bsi);
+      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
+    }
+}
+
 /* Renumber all of the gimple stmt uids.  */
 
 void
@@ -68,19 +87,7 @@ renumber_gimple_stmt_uids (struct function *fun)
 
   set_gimple_stmt_max_uid (fun, 0);
   FOR_ALL_BB_FN (bb, fun)
-    {
-      gimple_stmt_iterator bsi;
-      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
-        {
-          gimple *stmt = gsi_stmt (bsi);
-          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
-        }
-      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
-        {
-          gimple *stmt = gsi_stmt (bsi);
-          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fun));
-        }
-    }
+    renumber_gimple_stmt_uids_in_block (fun, bb);
 }
 
 /* Like renumber_gimple_stmt_uids, but only do work on the basic blocks
@@ -93,20 +100,7 @@ renumber_gimple_stmt_uids_in_blocks (basic_block *blocks, int n_blocks)
 
   set_gimple_stmt_max_uid (cfun, 0);
   for (i = 0; i < n_blocks; i++)
-    {
-      basic_block bb = blocks[i];
-      gimple_stmt_iterator bsi;
-      for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
-        {
-          gimple *stmt = gsi_stmt (bsi);
-          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
-        }
-      for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
-        {
-          gimple *stmt = gsi_stmt (bsi);
-          gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
-        }
-    }
+    renumber_gimple_stmt_uids_in_block (cfun, blocks[i]);
 }

--- a/gcc/tree-dfa.h
+++ b/gcc/tree-dfa.h

@@ -20,6 +20,7 @@ along with GCC; see the file COPYING3.  If not see
 #ifndef GCC_TREE_DFA_H
 #define GCC_TREE_DFA_H
 
+extern void renumber_gimple_stmt_uids_in_block (struct function *, basic_block);
 extern void renumber_gimple_stmt_uids (struct function *);
 extern void renumber_gimple_stmt_uids_in_blocks (basic_block *, int);
 extern void dump_variable (FILE *, tree);