tree-vect-data-refs.c (vect_find_stmt_data_reference): New function, combining stmt data ref gathering and fatal analysis parts.
2018-05-25  Richard Biener  <rguenther@suse.de>

        * tree-vect-data-refs.c (vect_find_stmt_data_reference): New
        function, combining stmt data ref gathering and fatal analysis
        parts.
        (vect_analyze_data_refs): Remove now redundant code and simplify.
        * tree-vect-loop.c (vect_get_datarefs_in_loop): Factor out from
        vect_analyze_loop_2 and use vect_find_stmt_data_reference.
        * tree-vect-slp.c (vect_slp_bb): Use vect_find_stmt_data_reference.
        * tree-vectorizer.h (vect_find_stmt_data_reference): Declare.

From-SVN: r260754
Parent: 1623d9f346
Commit: 8e846c6670
5 changed files with 193 additions and 199 deletions
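The refactoring is easiest to see as a pattern: the fatal per-statement checks that vect_analyze_data_refs used to repeat inline (volatile operands, throwing statements, calls other than masked loads/stores, bit-field accesses, constant base addresses) now live in one predicate, vect_find_stmt_data_reference, which either appends the statement's single data reference or reports failure, and both the loop and BB vectorizers walk statements through it. The sketch below is a self-contained, illustrative analogue of that shape only; none of its names (find_stmt_ref, struct stmt) are GCC APIs.

/* Illustrative analogue only -- not GCC code.  A single helper both gathers
   a statement's "data reference" and performs the fatal checks, so every
   caller shares the same rejection logic.  */
#include <stdbool.h>
#include <stdio.h>

struct stmt
{
  bool is_clobber;    /* Clobbers are ignored, not rejected.  */
  bool is_volatile;   /* Volatile access: cannot be vectorized.  */
  int n_refs;         /* Number of memory references in the stmt.  */
  int ref;            /* The single reference, when n_refs == 1.  */
};

/* Analogue of vect_find_stmt_data_reference: return false if the statement
   cannot be handled, otherwise append at most one reference to REFS.  */
static bool
find_stmt_ref (const struct stmt *s, int *refs, int *n_refs)
{
  if (s->is_clobber)
    return true;
  if (s->is_volatile || s->n_refs > 1)
    return false;
  if (s->n_refs == 1)
    refs[(*n_refs)++] = s->ref;
  return true;
}

int
main (void)
{
  struct stmt stmts[] = { { false, false, 1, 10 },
                          { true, false, 0, 0 },
                          { false, false, 1, 20 } };
  int refs[8], n_refs = 0;

  /* Analogue of the caller loops (vect_get_datarefs_in_loop, vect_slp_bb):
     walk the statements and stop at the first one that cannot be handled.  */
  for (unsigned i = 0; i < sizeof stmts / sizeof stmts[0]; i++)
    if (!find_stmt_ref (&stmts[i], refs, &n_refs))
      {
        printf ("not vectorized: statement %u cannot be handled\n", i);
        return 1;
      }

  printf ("collected %d data refs\n", n_refs);
  return 0;
}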
ChangeLog:
@@ -1,3 +1,14 @@
2018-05-25  Richard Biener  <rguenther@suse.de>

        * tree-vect-data-refs.c (vect_find_stmt_data_reference): New
        function, combining stmt data ref gathering and fatal analysis
        parts.
        (vect_analyze_data_refs): Remove now redundant code and simplify.
        * tree-vect-loop.c (vect_get_datarefs_in_loop): Factor out from
        vect_analyze_loop_2 and use vect_find_stmt_data_reference.
        * tree-vect-slp.c (vect_slp_bb): Use vect_find_stmt_data_reference.
        * tree-vectorizer.h (vect_find_stmt_data_reference): Declare.

2018-05-25  Bin Cheng  <bin.cheng@arm.com>

        PR tree-optimization/85720
tree-vect-data-refs.c:
@@ -3936,6 +3936,104 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
  return true;
}

/* Find the data references in STMT, analyze them with respect to LOOP and
   append them to DATAREFS.  Return false if datarefs in this stmt cannot
   be handled.  */

bool
vect_find_stmt_data_reference (loop_p loop, gimple *stmt,
                               vec<data_reference_p> *datarefs)
{
  /* We can ignore clobbers for dataref analysis - they are removed during
     loop vectorization and BB vectorization checks dependences with a
     stmt walk.  */
  if (gimple_clobber_p (stmt))
    return true;

  if (gimple_has_volatile_ops (stmt))
    {
      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                           "not vectorized: volatile type ");
          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
        }
      return false;
    }

  if (stmt_can_throw_internal (stmt))
    {
      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                           "not vectorized: statement can throw an "
                           "exception ");
          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
        }
      return false;
    }

  auto_vec<data_reference_p, 2> refs;
  if (!find_data_references_in_stmt (loop, stmt, &refs))
    return false;

  if (refs.is_empty ())
    return true;

  if (refs.length () > 1)
    {
      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                           "not vectorized: more than one data ref "
                           "in stmt: ");
          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
        }
      return false;
    }

  if (gcall *call = dyn_cast <gcall *> (stmt))
    if (!gimple_call_internal_p (call)
        || (gimple_call_internal_fn (call) != IFN_MASK_LOAD
            && gimple_call_internal_fn (call) != IFN_MASK_STORE))
      {
        if (dump_enabled_p ())
          {
            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                             "not vectorized: dr in a call ");
            dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
          }
        return false;
      }

  data_reference_p dr = refs.pop ();
  if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
    {
      if (dump_enabled_p ())
        {
          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                           "not vectorized: statement is bitfield "
                           "access ");
          dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
        }
      return false;
    }

  if (DR_BASE_ADDRESS (dr)
      && TREE_CODE (DR_BASE_ADDRESS (dr)) == INTEGER_CST)
    {
      if (dump_enabled_p ())
        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                         "not vectorized: base addr of dr is a "
                         "constant\n");
      return false;
    }

  datarefs->safe_push (dr);
  return true;
}

/* Function vect_analyze_data_refs.

   Find all the data references in the loop or basic block.
@@ -3974,38 +4072,14 @@ vect_analyze_data_refs (vec_info *vinfo, poly_uint64 *min_vf)
    {
      gimple *stmt;
      stmt_vec_info stmt_info;
      tree base, offset, init;
      enum { SG_NONE, GATHER, SCATTER } gatherscatter = SG_NONE;
      bool simd_lane_access = false;
      poly_uint64 vf;

again:
      if (!dr || !DR_REF (dr))
        {
          if (dump_enabled_p ())
            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                             "not vectorized: unhandled data-ref\n");
          return false;
        }

      gcc_assert (DR_REF (dr));
      stmt = DR_STMT (dr);
      stmt_info = vinfo_for_stmt (stmt);

      /* Discard clobbers from the dataref vector.  We will remove
         clobber stmts during vectorization.  */
      if (gimple_clobber_p (stmt))
        {
          free_data_ref (dr);
          if (i == datarefs.length () - 1)
            {
              datarefs.pop ();
              break;
            }
          datarefs.ordered_remove (i);
          dr = datarefs[i];
          goto again;
        }

      /* Check that analysis of the data-ref succeeded.  */
      if (!DR_BASE_ADDRESS (dr) || !DR_OFFSET (dr) || !DR_INIT (dr)
          || !DR_STEP (dr))
@@ -4117,95 +4191,42 @@ again:
            }
        }

      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == INTEGER_CST)
        {
          if (dump_enabled_p ())
            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                             "not vectorized: base addr of dr is a "
                             "constant\n");

          if (is_a <bb_vec_info> (vinfo))
            break;

          if (gatherscatter != SG_NONE || simd_lane_access)
            free_data_ref (dr);
          return false;
        }

      if (TREE_THIS_VOLATILE (DR_REF (dr)))
        {
          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: volatile type ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
            }

          if (is_a <bb_vec_info> (vinfo))
            break;

          return false;
        }

      if (stmt_can_throw_internal (stmt))
        {
          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: statement can throw an "
                               "exception ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
            }

          if (is_a <bb_vec_info> (vinfo))
            break;

          if (gatherscatter != SG_NONE || simd_lane_access)
            free_data_ref (dr);
          return false;
        }

      if (TREE_CODE (DR_REF (dr)) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (DR_REF (dr), 1)))
      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == ADDR_EXPR
          && VAR_P (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0))
          && DECL_NONALIASED (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0)))
        {
          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: statement is bitfield "
                               "access ");
                               "not vectorized: base object not addressable "
                               "for stmt: ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
            }

          if (is_a <bb_vec_info> (vinfo))
            break;

          if (gatherscatter != SG_NONE || simd_lane_access)
            free_data_ref (dr);
          return false;
            {
              /* In BB vectorization the ref can still participate
                 in dependence analysis, we just can't vectorize it.  */
              STMT_VINFO_VECTORIZABLE (stmt_info) = false;
              continue;
            }
          return false;
        }

      base = unshare_expr (DR_BASE_ADDRESS (dr));
      offset = unshare_expr (DR_OFFSET (dr));
      init = unshare_expr (DR_INIT (dr));

      if (is_gimple_call (stmt)
          && (!gimple_call_internal_p (stmt)
              || (gimple_call_internal_fn (stmt) != IFN_MASK_LOAD
                  && gimple_call_internal_fn (stmt) != IFN_MASK_STORE)))
      if (is_a <loop_vec_info> (vinfo)
          && TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
        {
          if (dump_enabled_p ())
          if (nested_in_vect_loop_p (loop, stmt))
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: dr in a call ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                                   "not vectorized: not suitable for strided "
                                   "load ");
                  dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
                }
              return false;
            }

          if (is_a <bb_vec_info> (vinfo))
            break;

          if (gatherscatter != SG_NONE || simd_lane_access)
            free_data_ref (dr);
          return false;
          STMT_VINFO_STRIDED_P (stmt_info) = true;
        }

      /* Update DR field in stmt_vec_info struct.  */
@@ -4222,6 +4243,9 @@ again:
             inner loop: *(BASE + INIT + OFFSET).  By construction,
             this address must be invariant in the inner loop, so we
             can consider it as being used in the outer loop.  */
          tree base = unshare_expr (DR_BASE_ADDRESS (dr));
          tree offset = unshare_expr (DR_OFFSET (dr));
          tree init = unshare_expr (DR_INIT (dr));
          tree init_offset = fold_build2 (PLUS_EXPR, TREE_TYPE (offset),
                                          init, offset);
          tree init_addr = fold_build_pointer_plus (base, init_offset);
@@ -4267,24 +4291,7 @@ again:
            }
        }

      if (STMT_VINFO_DATA_REF (stmt_info))
        {
          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: more than one data ref "
                               "in stmt: ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
            }

          if (is_a <bb_vec_info> (vinfo))
            break;

          if (gatherscatter != SG_NONE || simd_lane_access)
            free_data_ref (dr);
          return false;
        }

      gcc_assert (!STMT_VINFO_DATA_REF (stmt_info));
      STMT_VINFO_DATA_REF (stmt_info) = dr;
      if (simd_lane_access)
        {
@@ -4293,27 +4300,6 @@ again:
          datarefs[i] = dr;
        }

      if (TREE_CODE (DR_BASE_ADDRESS (dr)) == ADDR_EXPR
          && VAR_P (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0))
          && DECL_NONALIASED (TREE_OPERAND (DR_BASE_ADDRESS (dr), 0)))
        {
          if (dump_enabled_p ())
            {
              dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                               "not vectorized: base object not addressable "
                               "for stmt: ");
              dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
            }
          if (is_a <bb_vec_info> (vinfo))
            {
              /* In BB vectorization the ref can still participate
                 in dependence analysis, we just can't vectorize it.  */
              STMT_VINFO_VECTORIZABLE (stmt_info) = false;
              continue;
            }
          return false;
        }

      /* Set vectype for STMT.  */
      scalar_type = TREE_TYPE (DR_REF (dr));
      STMT_VINFO_VECTYPE (stmt_info)
@@ -4391,23 +4377,6 @@ again:
          datarefs[i] = dr;
          STMT_VINFO_GATHER_SCATTER_P (stmt_info) = gatherscatter;
        }

      else if (is_a <loop_vec_info> (vinfo)
               && TREE_CODE (DR_STEP (dr)) != INTEGER_CST)
        {
          if (nested_in_vect_loop_p (loop, stmt))
            {
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                                   "not vectorized: not suitable for strided "
                                   "load ");
                  dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
                }
              return false;
            }
          STMT_VINFO_STRIDED_P (stmt_info) = true;
        }
    }

  /* If we stopped analysis at the first dataref we could not analyze
tree-vect-loop.c:
@@ -1774,40 +1774,12 @@ vect_analyze_loop_costing (loop_vec_info loop_vinfo)
  return 1;
}


/* Function vect_analyze_loop_2.

   Apply a set of analyses on LOOP, and create a loop_vec_info struct
   for it.  The different analyses will record information in the
   loop_vec_info struct.  */
static bool
vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
vect_get_datarefs_in_loop (loop_p loop, basic_block *bbs,
                           vec<data_reference_p> *datarefs,
                           unsigned int *n_stmts)
{
  bool ok;
  int res;
  unsigned int max_vf = MAX_VECTORIZATION_FACTOR;
  poly_uint64 min_vf = 2;
  unsigned int n_stmts = 0;

  /* The first group of checks is independent of the vector size.  */
  fatal = true;

  /* Find all data references in the loop (which correspond to vdefs/vuses)
     and analyze their evolution in the loop.  */

  basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);

  loop_p loop = LOOP_VINFO_LOOP (loop_vinfo);
  if (!find_loop_nest (loop, &LOOP_VINFO_LOOP_NEST (loop_vinfo)))
    {
      if (dump_enabled_p ())
        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                         "not vectorized: loop nest containing two "
                         "or more consecutive inner loops cannot be "
                         "vectorized\n");
      return false;
    }

  *n_stmts = 0;
  for (unsigned i = 0; i < loop->num_nodes; i++)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]);
         !gsi_end_p (gsi); gsi_next (&gsi))
@@ -1815,9 +1787,8 @@ vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
        gimple *stmt = gsi_stmt (gsi);
        if (is_gimple_debug (stmt))
          continue;
        ++n_stmts;
        if (!find_data_references_in_stmt (loop, stmt,
                                           &LOOP_VINFO_DATAREFS (loop_vinfo)))
        ++(*n_stmts);
        if (!vect_find_stmt_data_reference (loop, stmt, datarefs))
          {
            if (is_gimple_call (stmt) && loop->safelen)
              {
@@ -1849,14 +1820,55 @@ vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
              }
            }
        }
        if (dump_enabled_p ())
          dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                           "not vectorized: loop contains function "
                           "calls or data references that cannot "
                           "be analyzed\n");
        return false;
      }
    }
  return true;
}

/* Function vect_analyze_loop_2.

   Apply a set of analyses on LOOP, and create a loop_vec_info struct
   for it.  The different analyses will record information in the
   loop_vec_info struct.  */
static bool
vect_analyze_loop_2 (loop_vec_info loop_vinfo, bool &fatal)
{
  bool ok;
  int res;
  unsigned int max_vf = MAX_VECTORIZATION_FACTOR;
  poly_uint64 min_vf = 2;

  /* The first group of checks is independent of the vector size.  */
  fatal = true;

  /* Find all data references in the loop (which correspond to vdefs/vuses)
     and analyze their evolution in the loop.  */

  loop_p loop = LOOP_VINFO_LOOP (loop_vinfo);
  if (!find_loop_nest (loop, &LOOP_VINFO_LOOP_NEST (loop_vinfo)))
    {
      if (dump_enabled_p ())
        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                         "not vectorized: loop nest containing two "
                         "or more consecutive inner loops cannot be "
                         "vectorized\n");
      return false;
    }

  /* Gather the data references and count stmts in the loop.  */
  unsigned int n_stmts;
  if (!vect_get_datarefs_in_loop (loop, LOOP_VINFO_BBS (loop_vinfo),
                                  &LOOP_VINFO_DATAREFS (loop_vinfo),
                                  &n_stmts))
    {
      if (dump_enabled_p ())
        dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
                         "not vectorized: loop contains function "
                         "calls or data references that cannot "
                         "be analyzed\n");
      return false;
    }

  /* Analyze the data references and also adjust the minimal
     vectorization factor according to the loads and stores.  */
tree-vect-slp.c:
@@ -3011,7 +3011,7 @@ vect_slp_bb (basic_block bb)
          if (gimple_location (stmt) != UNKNOWN_LOCATION)
            vect_location = gimple_location (stmt);

          if (!find_data_references_in_stmt (NULL, stmt, &datarefs))
          if (!vect_find_stmt_data_reference (NULL, stmt, &datarefs))
            break;
        }
tree-vectorizer.h:
@@ -1495,6 +1495,8 @@ extern bool vect_gather_scatter_fn_p (bool, bool, tree, tree, unsigned int,
                                      signop, int, internal_fn *, tree *);
extern bool vect_check_gather_scatter (gimple *, loop_vec_info,
                                       gather_scatter_info *);
extern bool vect_find_stmt_data_reference (loop_p, gimple *,
                                           vec<data_reference_p> *);
extern bool vect_analyze_data_refs (vec_info *, poly_uint64 *);
extern void vect_record_base_alignments (vec_info *);
extern tree vect_create_data_ref_ptr (gimple *, tree, struct loop *, tree,
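Condensed from the new code above, the caller side reduces to a single statement walk. The fragment below only restates lines from the vect_get_datarefs_in_loop and vect_slp_bb hunks (GCC-internal types and functions, so it is not compilable outside the GCC tree), and it elides the loop->safelen special case for calls shown above; the loop vectorizer passes the enclosing loop while vect_slp_bb passes NULL.

  for (gimple_stmt_iterator gsi = gsi_start_bb (bbs[i]);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;
      ++(*n_stmts);
      /* Gathers the stmt's single data reference into DATAREFS, or fails.  */
      if (!vect_find_stmt_data_reference (loop, stmt, datarefs))
        return false;
    }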