tree-vectorizer.h (vect_dr_stmt): New function.
2018-06-01  Richard Biener  <rguenther@suse.de>

        * tree-vectorizer.h (vect_dr_stmt): New function.
        (vect_get_load_cost): Adjust.
        (vect_get_store_cost): Likewise.
        * tree-vect-data-refs.c (vect_analyze_data_ref_dependence):
        Use vect_dr_stmt instead of DR_STMT.
        (vect_record_base_alignments): Likewise.
        (vect_calculate_target_alignment): Likewise.
        (vect_compute_data_ref_alignment): Likewise and make static.
        (vect_update_misalignment_for_peel): Likewise.
        (vect_verify_datarefs_alignment): Likewise.
        (vector_alignment_reachable_p): Likewise.
        (vect_get_data_access_cost): Likewise.  Pass down
        vinfo to vect_get_load_cost/vect_get_store_cost instead of DR.
        (vect_get_peeling_costs_all_drs): Likewise.
        (vect_peeling_hash_get_lowest_cost): Likewise.
        (vect_enhance_data_refs_alignment): Likewise.
        (vect_find_same_alignment_drs): Likewise.
        (vect_analyze_data_refs_alignment): Likewise.
        (vect_analyze_group_access_1): Likewise.
        (vect_analyze_group_access): Likewise.
        (vect_analyze_data_ref_access): Likewise.
        (vect_analyze_data_ref_accesses): Likewise.
        (vect_vfa_segment_size): Likewise.
        (vect_small_gap_p): Likewise.
        (vectorizable_with_step_bound_p): Likewise.
        (vect_prune_runtime_alias_test_list): Likewise.
        (vect_analyze_data_refs): Likewise.
        (vect_supportable_dr_alignment): Likewise.
        * tree-vect-loop-manip.c (get_misalign_in_elems): Likewise.
        (vect_gen_prolog_loop_niters): Likewise.
        * tree-vect-loop.c (vect_analyze_loop_2): Likewise.
        * tree-vect-patterns.c (vect_recog_bool_pattern): Do not
        modify DR_STMT.
        (vect_recog_mask_conversion_pattern): Likewise.
        (vect_try_gather_scatter_pattern): Likewise.
        * tree-vect-stmts.c (vect_model_store_cost): Pass stmt_info
        to vect_get_store_cost.
        (vect_get_store_cost): Get stmt_info instead of DR.
        (vect_model_load_cost): Pass stmt_info to vect_get_load_cost.
        (vect_get_load_cost): Get stmt_info instead of DR.

From-SVN: r261062
parent 47cac108ef
commit 57c454d29c
7 changed files with 139 additions and 95 deletions
gcc/ChangeLog
@@ -1,3 +1,46 @@
+2018-06-01  Richard Biener  <rguenther@suse.de>
+
+        * tree-vectorizer.h (vect_dr_stmt): New function.
+        (vect_get_load_cost): Adjust.
+        (vect_get_store_cost): Likewise.
+        * tree-vect-data-refs.c (vect_analyze_data_ref_dependence):
+        Use vect_dr_stmt instead of DR_STMT.
+        (vect_record_base_alignments): Likewise.
+        (vect_calculate_target_alignment): Likewise.
+        (vect_compute_data_ref_alignment): Likewise and make static.
+        (vect_update_misalignment_for_peel): Likewise.
+        (vect_verify_datarefs_alignment): Likewise.
+        (vector_alignment_reachable_p): Likewise.
+        (vect_get_data_access_cost): Likewise.  Pass down
+        vinfo to vect_get_load_cost/vect_get_store_cost instead of DR.
+        (vect_get_peeling_costs_all_drs): Likewise.
+        (vect_peeling_hash_get_lowest_cost): Likewise.
+        (vect_enhance_data_refs_alignment): Likewise.
+        (vect_find_same_alignment_drs): Likewise.
+        (vect_analyze_data_refs_alignment): Likewise.
+        (vect_analyze_group_access_1): Likewise.
+        (vect_analyze_group_access): Likewise.
+        (vect_analyze_data_ref_access): Likewise.
+        (vect_analyze_data_ref_accesses): Likewise.
+        (vect_vfa_segment_size): Likewise.
+        (vect_small_gap_p): Likewise.
+        (vectorizable_with_step_bound_p): Likewise.
+        (vect_prune_runtime_alias_test_list): Likewise.
+        (vect_analyze_data_refs): Likewise.
+        (vect_supportable_dr_alignment): Likewise.
+        * tree-vect-loop-manip.c (get_misalign_in_elems): Likewise.
+        (vect_gen_prolog_loop_niters): Likewise.
+        * tree-vect-loop.c (vect_analyze_loop_2): Likewise.
+        * tree-vect-patterns.c (vect_recog_bool_pattern): Do not
+        modify DR_STMT.
+        (vect_recog_mask_conversion_pattern): Likewise.
+        (vect_try_gather_scatter_pattern): Likewise.
+        * tree-vect-stmts.c (vect_model_store_cost): Pass stmt_info
+        to vect_get_store_cost.
+        (vect_get_store_cost): Get stmt_info instead of DR.
+        (vect_model_load_cost): Pass stmt_info to vect_get_load_cost.
+        (vect_get_load_cost): Get stmt_info instead of DR.
+
 2018-06-01  Richard Biener  <rguenther@suse.de>
 
        PR middle-end/86017
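Nearly every hunk that follows applies the same one-line substitution at a data-reference use site. As a minimal sketch of that before/after shape (illustrative fragment, not an excerpt from the patch; dr stands for any data_reference * in scope):

  /* Before: the statement recorded in the data reference is used directly,
     which is the wrong statement once a pattern recognizer replaced it.  */
  stmt_vec_info info = vinfo_for_stmt (DR_STMT (dr));

  /* After: vect_dr_stmt resolves to the pattern stmt when one exists, so
     downstream code sees the statement that will actually be vectorized.  */
  stmt_vec_info info = vinfo_for_stmt (vect_dr_stmt (dr));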
gcc/tree-vect-data-refs.c
@@ -290,8 +290,8 @@ vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   struct data_reference *dra = DDR_A (ddr);
   struct data_reference *drb = DDR_B (ddr);
-  stmt_vec_info stmtinfo_a = vinfo_for_stmt (DR_STMT (dra));
-  stmt_vec_info stmtinfo_b = vinfo_for_stmt (DR_STMT (drb));
+  stmt_vec_info stmtinfo_a = vinfo_for_stmt (vect_dr_stmt (dra));
+  stmt_vec_info stmtinfo_b = vinfo_for_stmt (vect_dr_stmt (drb));
   lambda_vector dist_v;
   unsigned int loop_depth;

@@ -467,7 +467,8 @@ vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
          ... = a[i];
          a[i+1] = ...;
        where loads from the group interleave with the store.  */
-      if (!vect_preserves_scalar_order_p (DR_STMT (dra), DR_STMT (drb)))
+      if (!vect_preserves_scalar_order_p (vect_dr_stmt (dra),
+                                          vect_dr_stmt (drb)))
        {
          if (dump_enabled_p ())
            dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,

@@ -618,9 +619,9 @@ vect_slp_analyze_data_ref_dependence (struct data_dependence_relation *ddr)
 
   /* If dra and drb are part of the same interleaving chain consider
      them independent.  */
-  if (STMT_VINFO_GROUPED_ACCESS (vinfo_for_stmt (DR_STMT (dra)))
-      && (DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dra)))
-          == DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (drb)))))
+  if (STMT_VINFO_GROUPED_ACCESS (vinfo_for_stmt (vect_dr_stmt (dra)))
+      && (DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (vect_dr_stmt (dra)))
+          == DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (vect_dr_stmt (drb)))))
     return false;
 
   /* Unknown data dependence.  */
@@ -834,22 +835,21 @@ vect_record_base_alignments (vec_info *vinfo)
   unsigned int i;
   FOR_EACH_VEC_ELT (vinfo->datarefs, i, dr)
     {
-      gimple *stmt = DR_STMT (dr);
+      gimple *stmt = vect_dr_stmt (dr);
       if (!DR_IS_CONDITIONAL_IN_STMT (dr)
          && STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (stmt)))
        {
          vect_record_base_alignment (vinfo, stmt, &DR_INNERMOST (dr));
 
          /* If DR is nested in the loop that is being vectorized, we can also
             record the alignment of the base wrt the outer loop.  */
          if (loop && nested_in_vect_loop_p (loop, stmt))
            {
              stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
              vect_record_base_alignment
                (vinfo, stmt, &STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info));
            }
        }
     }
 }
@@ -858,7 +858,7 @@ vect_record_base_alignments (vec_info *vinfo)
 static unsigned int
 vect_calculate_target_alignment (struct data_reference *dr)
 {
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   return targetm.vectorize.preferred_vector_alignment (vectype);

@@ -876,10 +876,10 @@ vect_calculate_target_alignment (struct data_reference *dr)
    FOR NOW: No analysis is actually performed.  Misalignment is calculated
    only for trivial cases.  TODO.  */
 
-bool
+static bool
 vect_compute_data_ref_alignment (struct data_reference *dr)
 {
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   vec_base_alignments *base_alignments = &stmt_info->vinfo->base_alignments;
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -1068,8 +1068,8 @@ vect_update_misalignment_for_peel (struct data_reference *dr,
   struct data_reference *current_dr;
   int dr_size = vect_get_scalar_dr_size (dr);
   int dr_peel_size = vect_get_scalar_dr_size (dr_peel);
-  stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
-  stmt_vec_info peel_stmt_info = vinfo_for_stmt (DR_STMT (dr_peel));
+  stmt_vec_info stmt_info = vinfo_for_stmt (vect_dr_stmt (dr));
+  stmt_vec_info peel_stmt_info = vinfo_for_stmt (vect_dr_stmt (dr_peel));
 
   /* For interleaved data accesses the step in the loop must be multiplied by
      the size of the interleaving group.  */

@@ -1081,7 +1081,7 @@ vect_update_misalignment_for_peel (struct data_reference *dr,
   /* It can be assumed that the data refs with the same alignment as dr_peel
      are aligned in the vector loop.  */
   same_aligned_drs
-    = STMT_VINFO_SAME_ALIGN_REFS (vinfo_for_stmt (DR_STMT (dr_peel)));
+    = STMT_VINFO_SAME_ALIGN_REFS (vinfo_for_stmt (vect_dr_stmt (dr_peel)));
   FOR_EACH_VEC_ELT (same_aligned_drs, i, current_dr)
     {
       if (current_dr != dr)
@@ -1161,7 +1161,7 @@ vect_verify_datarefs_alignment (loop_vec_info vinfo)
 
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
-      gimple *stmt = DR_STMT (dr);
+      gimple *stmt = vect_dr_stmt (dr);
       stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
       if (!STMT_VINFO_RELEVANT_P (stmt_info))

@@ -1206,7 +1206,7 @@ not_size_aligned (tree exp)
 static bool
 vector_alignment_reachable_p (struct data_reference *dr)
 {
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
 
@@ -1276,7 +1276,7 @@ vect_get_data_access_cost (struct data_reference *dr,
                           stmt_vector_for_cost *body_cost_vec,
                           stmt_vector_for_cost *prologue_cost_vec)
 {
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   int ncopies;

@@ -1287,10 +1287,10 @@ vect_get_data_access_cost (struct data_reference *dr,
   ncopies = vect_get_num_copies (loop_vinfo, STMT_VINFO_VECTYPE (stmt_info));
 
   if (DR_IS_READ (dr))
-    vect_get_load_cost (dr, ncopies, true, inside_cost, outside_cost,
+    vect_get_load_cost (stmt_info, ncopies, true, inside_cost, outside_cost,
                        prologue_cost_vec, body_cost_vec, false);
   else
-    vect_get_store_cost (dr, ncopies, inside_cost, body_cost_vec);
+    vect_get_store_cost (stmt_info, ncopies, inside_cost, body_cost_vec);
 
   if (dump_enabled_p ())
     dump_printf_loc (MSG_NOTE, vect_location,
@@ -1406,7 +1406,7 @@ vect_get_peeling_costs_all_drs (vec<data_reference_p> datarefs,
 
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
-      gimple *stmt = DR_STMT (dr);
+      gimple *stmt = vect_dr_stmt (dr);
       stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
       if (!STMT_VINFO_RELEVANT_P (stmt_info))
        continue;

@@ -1447,7 +1447,7 @@ vect_peeling_hash_get_lowest_cost (_vect_peel_info **slot,
   vect_peel_info elem = *slot;
   int dummy;
   unsigned int inside_cost = 0, outside_cost = 0;
-  gimple *stmt = DR_STMT (elem->dr);
+  gimple *stmt = vect_dr_stmt (elem->dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   stmt_vector_for_cost prologue_cost_vec, body_cost_vec,
@@ -1543,7 +1543,7 @@ vect_peeling_supportable (loop_vec_info loop_vinfo, struct data_reference *dr0,
       if (dr == dr0)
        continue;
 
-      stmt = DR_STMT (dr);
+      stmt = vect_dr_stmt (dr);
       stmt_info = vinfo_for_stmt (stmt);
       /* For interleaving, only the alignment of the first access
         matters.  */

@@ -1727,7 +1727,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
 
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
-      stmt = DR_STMT (dr);
+      stmt = vect_dr_stmt (dr);
       stmt_info = vinfo_for_stmt (stmt);
 
       if (!STMT_VINFO_RELEVANT_P (stmt_info))
@@ -1938,7 +1938,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
 
       peel_for_unknown_alignment.peel_info.count = 1
        + STMT_VINFO_SAME_ALIGN_REFS
-           (vinfo_for_stmt (DR_STMT (dr0))).length ();
+           (vinfo_for_stmt (vect_dr_stmt (dr0))).length ();
     }
 
   peel_for_unknown_alignment.peel_info.npeel = 0;

@@ -2019,7 +2019,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
 
   if (do_peeling)
     {
-      stmt = DR_STMT (dr0);
+      stmt = vect_dr_stmt (dr0);
       stmt_info = vinfo_for_stmt (stmt);
       vectype = STMT_VINFO_VECTYPE (stmt_info);
 
@@ -2043,7 +2043,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
          /* For interleaved data access every iteration accesses all the
             members of the group, therefore we divide the number of iterations
             by the group size.  */
-         stmt_info = vinfo_for_stmt (DR_STMT (dr0));
+         stmt_info = vinfo_for_stmt (vect_dr_stmt (dr0));
          if (STMT_VINFO_GROUPED_ACCESS (stmt_info))
            npeel /= DR_GROUP_SIZE (stmt_info);
 

@@ -2117,7 +2117,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
        {
          /* Strided accesses perform only component accesses, alignment
             is irrelevant for them.  */
-         stmt_info = vinfo_for_stmt (DR_STMT (dr));
+         stmt_info = vinfo_for_stmt (vect_dr_stmt (dr));
          if (STMT_VINFO_STRIDED_P (stmt_info)
              && !STMT_VINFO_GROUPED_ACCESS (stmt_info))
            continue;
@@ -2166,7 +2166,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
     {
       FOR_EACH_VEC_ELT (datarefs, i, dr)
        {
-         stmt = DR_STMT (dr);
+         stmt = vect_dr_stmt (dr);
          stmt_info = vinfo_for_stmt (stmt);
 
          /* For interleaving, only the alignment of the first access

@@ -2202,7 +2202,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
              break;
            }
 
-         stmt = DR_STMT (dr);
+         stmt = vect_dr_stmt (dr);
          vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
          gcc_assert (vectype);
 
@@ -2232,7 +2232,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
                      || LOOP_VINFO_PTR_MASK (loop_vinfo) == mask);
          LOOP_VINFO_PTR_MASK (loop_vinfo) = mask;
          LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).safe_push (
-                   DR_STMT (dr));
+                   vect_dr_stmt (dr));
        }
     }
 

@@ -2292,8 +2292,8 @@ vect_find_same_alignment_drs (struct data_dependence_relation *ddr)
 {
   struct data_reference *dra = DDR_A (ddr);
   struct data_reference *drb = DDR_B (ddr);
-  stmt_vec_info stmtinfo_a = vinfo_for_stmt (DR_STMT (dra));
-  stmt_vec_info stmtinfo_b = vinfo_for_stmt (DR_STMT (drb));
+  stmt_vec_info stmtinfo_a = vinfo_for_stmt (vect_dr_stmt (dra));
+  stmt_vec_info stmtinfo_b = vinfo_for_stmt (vect_dr_stmt (drb));
 
   if (DDR_ARE_DEPENDENT (ddr) == chrec_known)
     return;
@@ -2364,7 +2364,7 @@ vect_analyze_data_refs_alignment (loop_vec_info vinfo)
   vect_record_base_alignments (vinfo);
   FOR_EACH_VEC_ELT (datarefs, i, dr)
     {
-      stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
+      stmt_vec_info stmt_info = vinfo_for_stmt (vect_dr_stmt (dr));
       if (STMT_VINFO_VECTORIZABLE (stmt_info)
          && !vect_compute_data_ref_alignment (dr))
        {

@@ -2458,7 +2458,7 @@ vect_analyze_group_access_1 (struct data_reference *dr)
   tree step = DR_STEP (dr);
   tree scalar_type = TREE_TYPE (DR_REF (dr));
   HOST_WIDE_INT type_size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (scalar_type));
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
@@ -2535,7 +2535,7 @@ vect_analyze_group_access_1 (struct data_reference *dr)
          if (bb_vinfo)
            {
              /* Mark the statement as unvectorizable.  */
-             STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
+             STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (vect_dr_stmt (dr))) = false;
              return true;
            }
 

@@ -2696,7 +2696,7 @@ vect_analyze_group_access (struct data_reference *dr)
     {
       /* Dissolve the group if present.  */
       gimple *next;
-      gimple *stmt = DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (DR_STMT (dr)));
+      gimple *stmt = DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (vect_dr_stmt (dr)));
       while (stmt)
        {
          stmt_vec_info vinfo = vinfo_for_stmt (stmt);
@@ -2719,7 +2719,7 @@ vect_analyze_data_ref_access (struct data_reference *dr)
 {
   tree step = DR_STEP (dr);
   tree scalar_type = TREE_TYPE (DR_REF (dr));
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
   struct loop *loop = NULL;

@@ -2948,7 +2948,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
   for (i = 0; i < datarefs_copy.length () - 1;)
     {
       data_reference_p dra = datarefs_copy[i];
-      stmt_vec_info stmtinfo_a = vinfo_for_stmt (DR_STMT (dra));
+      stmt_vec_info stmtinfo_a = vinfo_for_stmt (vect_dr_stmt (dra));
       stmt_vec_info lastinfo = NULL;
       if (!STMT_VINFO_VECTORIZABLE (stmtinfo_a)
          || STMT_VINFO_GATHER_SCATTER_P (stmtinfo_a))
@@ -2959,7 +2959,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
       for (i = i + 1; i < datarefs_copy.length (); ++i)
        {
          data_reference_p drb = datarefs_copy[i];
-         stmt_vec_info stmtinfo_b = vinfo_for_stmt (DR_STMT (drb));
+         stmt_vec_info stmtinfo_b = vinfo_for_stmt (vect_dr_stmt (drb));
          if (!STMT_VINFO_VECTORIZABLE (stmtinfo_b)
              || STMT_VINFO_GATHER_SCATTER_P (stmtinfo_b))
            break;

@@ -2983,7 +2983,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
              || data_ref_compare_tree (DR_BASE_ADDRESS (dra),
                                        DR_BASE_ADDRESS (drb)) != 0
              || data_ref_compare_tree (DR_OFFSET (dra), DR_OFFSET (drb)) != 0
-             || !can_group_stmts_p (DR_STMT (dra), DR_STMT (drb)))
+             || !can_group_stmts_p (vect_dr_stmt (dra), vect_dr_stmt (drb)))
            break;
 
          /* Check that the data-refs have the same constant size.  */
@@ -3072,17 +3072,17 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
          /* Link the found element into the group list.  */
          if (!DR_GROUP_FIRST_ELEMENT (stmtinfo_a))
            {
-             DR_GROUP_FIRST_ELEMENT (stmtinfo_a) = DR_STMT (dra);
+             DR_GROUP_FIRST_ELEMENT (stmtinfo_a) = vect_dr_stmt (dra);
              lastinfo = stmtinfo_a;
            }
-         DR_GROUP_FIRST_ELEMENT (stmtinfo_b) = DR_STMT (dra);
-         DR_GROUP_NEXT_ELEMENT (lastinfo) = DR_STMT (drb);
+         DR_GROUP_FIRST_ELEMENT (stmtinfo_b) = vect_dr_stmt (dra);
+         DR_GROUP_NEXT_ELEMENT (lastinfo) = vect_dr_stmt (drb);
          lastinfo = stmtinfo_b;
        }
     }
 
   FOR_EACH_VEC_ELT (datarefs_copy, i, dr)
-    if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr)))
+    if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (vect_dr_stmt (dr)))
        && !vect_analyze_data_ref_access (dr))
       {
        if (dump_enabled_p ())
@@ -3092,7 +3092,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
        if (is_a <bb_vec_info> (vinfo))
          {
            /* Mark the statement as not vectorizable.  */
-           STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr))) = false;
+           STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (vect_dr_stmt (dr))) = false;
            continue;
          }
        else

@@ -3133,13 +3133,13 @@ vect_vfa_segment_size (struct data_reference *dr, tree length_factor)
 static unsigned HOST_WIDE_INT
 vect_vfa_access_size (data_reference *dr)
 {
-  stmt_vec_info stmt_vinfo = vinfo_for_stmt (DR_STMT (dr));
+  stmt_vec_info stmt_vinfo = vinfo_for_stmt (vect_dr_stmt (dr));
   tree ref_type = TREE_TYPE (DR_REF (dr));
   unsigned HOST_WIDE_INT ref_size = tree_to_uhwi (TYPE_SIZE_UNIT (ref_type));
   unsigned HOST_WIDE_INT access_size = ref_size;
   if (DR_GROUP_FIRST_ELEMENT (stmt_vinfo))
     {
-      gcc_assert (DR_GROUP_FIRST_ELEMENT (stmt_vinfo) == DR_STMT (dr));
+      gcc_assert (DR_GROUP_FIRST_ELEMENT (stmt_vinfo) == vect_dr_stmt (dr));
       access_size *= DR_GROUP_SIZE (stmt_vinfo) - DR_GROUP_GAP (stmt_vinfo);
     }
   if (STMT_VINFO_VEC_STMT (stmt_vinfo)
@@ -3307,7 +3307,7 @@ vect_check_lower_bound (loop_vec_info loop_vinfo, tree expr, bool unsigned_p,
 static bool
 vect_small_gap_p (loop_vec_info loop_vinfo, data_reference *dr, poly_int64 gap)
 {
-  stmt_vec_info stmt_info = vinfo_for_stmt (DR_STMT (dr));
+  stmt_vec_info stmt_info = vinfo_for_stmt (vect_dr_stmt (dr));
   HOST_WIDE_INT count
     = estimated_poly_value (LOOP_VINFO_VECT_FACTOR (loop_vinfo));
   if (DR_GROUP_FIRST_ELEMENT (stmt_info))

@@ -3344,7 +3344,8 @@ vectorizable_with_step_bound_p (data_reference *dr_a, data_reference *dr_b,
   /* If the two accesses could be dependent within a scalar iteration,
      make sure that we'd retain their order.  */
   if (maybe_gt (init_a + vect_get_scalar_dr_size (dr_a), init_b)
-      && !vect_preserves_scalar_order_p (DR_STMT (dr_a), DR_STMT (dr_b)))
+      && !vect_preserves_scalar_order_p (vect_dr_stmt (dr_a),
+                                         vect_dr_stmt (dr_b)))
     return false;
 
   /* There is no alias if abs (DR_STEP) is greater than or equal to
@@ -3440,10 +3441,10 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
        }
 
       dr_a = DDR_A (ddr);
-      stmt_a = DR_STMT (DDR_A (ddr));
+      stmt_a = vect_dr_stmt (DDR_A (ddr));
 
       dr_b = DDR_B (ddr);
-      stmt_b = DR_STMT (DDR_B (ddr));
+      stmt_b = vect_dr_stmt (DDR_B (ddr));
 
       /* Skip the pair if inter-iteration dependencies are irrelevant
         and intra-iteration dependencies are guaranteed to be honored.  */

@@ -4094,7 +4095,7 @@ vect_analyze_data_refs (vec_info *vinfo, poly_uint64 *min_vf)
       poly_uint64 vf;
 
       gcc_assert (DR_REF (dr));
-      stmt = DR_STMT (dr);
+      stmt = vect_dr_stmt (dr);
       stmt_info = vinfo_for_stmt (stmt);
 
       /* Check that analysis of the data-ref succeeded.  */
@@ -6472,7 +6473,7 @@ enum dr_alignment_support
 vect_supportable_dr_alignment (struct data_reference *dr,
                                bool check_aligned_accesses)
 {
-  gimple *stmt = DR_STMT (dr);
+  gimple *stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   machine_mode mode = TYPE_MODE (vectype);
gcc/tree-vect-loop-manip.c
@@ -1560,7 +1560,7 @@ static tree
 get_misalign_in_elems (gimple **seq, loop_vec_info loop_vinfo)
 {
   struct data_reference *dr = LOOP_VINFO_UNALIGNED_DR (loop_vinfo);
-  gimple *dr_stmt = DR_STMT (dr);
+  gimple *dr_stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
 

@@ -1631,7 +1631,7 @@ vect_gen_prolog_loop_niters (loop_vec_info loop_vinfo,
   tree niters_type = TREE_TYPE (LOOP_VINFO_NITERS (loop_vinfo));
   gimple_seq stmts = NULL, new_stmts = NULL;
   tree iters, iters_name;
-  gimple *dr_stmt = DR_STMT (dr);
+  gimple *dr_stmt = vect_dr_stmt (dr);
   stmt_vec_info stmt_info = vinfo_for_stmt (dr_stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   unsigned int target_align = DR_TARGET_ALIGNMENT (dr);
gcc/tree-vect-loop.c
@@ -2165,7 +2165,7 @@ start_over:
        {
          struct data_reference *dr = LOOP_VINFO_UNALIGNED_DR (loop_vinfo);
          tree vectype
-           = STMT_VINFO_VECTYPE (vinfo_for_stmt (DR_STMT (dr)));
+           = STMT_VINFO_VECTYPE (vinfo_for_stmt (vect_dr_stmt (dr)));
          niters_th += TYPE_VECTOR_SUBPARTS (vectype) - 1;
        }
       else
gcc/tree-vect-patterns.c
@@ -3880,7 +3880,6 @@ vect_recog_bool_pattern (vec<gimple *> *stmts, tree *type_in,
            = STMT_VINFO_DATA_REF (stmt_vinfo);
          STMT_VINFO_DR_WRT_VEC_LOOP (pattern_stmt_info)
            = STMT_VINFO_DR_WRT_VEC_LOOP (stmt_vinfo);
-         DR_STMT (STMT_VINFO_DATA_REF (stmt_vinfo)) = pattern_stmt;
          *type_out = vectype;
          *type_in = vectype;
          stmts->safe_push (last_stmt);

@@ -4017,7 +4016,6 @@ vect_recog_mask_conversion_pattern (vec<gimple *> *stmts, tree *type_in,
        = STMT_VINFO_DATA_REF (stmt_vinfo);
       STMT_VINFO_DR_WRT_VEC_LOOP (pattern_stmt_info)
        = STMT_VINFO_DR_WRT_VEC_LOOP (stmt_vinfo);
-      DR_STMT (STMT_VINFO_DATA_REF (stmt_vinfo)) = pattern_stmt;
 
       *type_out = vectype1;
       *type_in = vectype1;

@@ -4376,7 +4374,6 @@ vect_try_gather_scatter_pattern (gimple *stmt, stmt_vec_info last_stmt_info,
     = STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info);
   STMT_VINFO_GATHER_SCATTER_P (pattern_stmt_info)
     = STMT_VINFO_GATHER_SCATTER_P (stmt_info);
-  DR_STMT (dr) = pattern_stmt;
 
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   *type_out = vectype;
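The three tree-vect-patterns.c hunks above all delete the same side effect: the recognizers used to redirect DR_STMT of the data reference to the freshly built pattern statement. With vect_dr_stmt resolving the pattern statement on demand, that mutation is no longer needed. In sketch form (illustrative, using the fields named in the patch):

  /* Formerly, after building pattern_stmt a recognizer did:
       DR_STMT (STMT_VINFO_DATA_REF (stmt_vinfo)) = pattern_stmt;
     Now the data reference is left untouched; consumers call
     vect_dr_stmt (dr), which follows STMT_VINFO_RELATED_STMT when
     STMT_VINFO_IN_PATTERN_P is set on the original statement.  */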
gcc/tree-vect-stmts.c
@@ -995,7 +995,6 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
                       stmt_vector_for_cost *cost_vec)
 {
   unsigned int inside_cost = 0, prologue_cost = 0;
-  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   gimple *first_stmt = STMT_VINFO_STMT (stmt_info);
   bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
 

@@ -1016,10 +1015,7 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
   /* Grouped stores update all elements in the group at once,
      so we want the DR for the first statement.  */
   if (!slp_node && grouped_access_p)
-    {
-      first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
-      dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
-    }
+    first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
 
   /* True if we should include any once-per-group costs as well as
      the cost of the statement itself.  For SLP we only get called
@@ -1058,7 +1054,7 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
                               scalar_store, stmt_info, 0, vect_body);
     }
   else
-    vect_get_store_cost (dr, ncopies, &inside_cost, cost_vec);
+    vect_get_store_cost (stmt_info, ncopies, &inside_cost, cost_vec);
 
   if (memory_access_type == VMAT_ELEMENTWISE
       || memory_access_type == VMAT_STRIDED_SLP)

@@ -1079,13 +1075,12 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
 
 /* Calculate cost of DR's memory access.  */
 void
-vect_get_store_cost (struct data_reference *dr, int ncopies,
+vect_get_store_cost (stmt_vec_info stmt_info, int ncopies,
                     unsigned int *inside_cost,
                     stmt_vector_for_cost *body_cost_vec)
 {
+  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
-  gimple *stmt = DR_STMT (dr);
-  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
   switch (alignment_support_scheme)
     {
@@ -1145,7 +1140,6 @@ vect_model_load_cost (stmt_vec_info stmt_info, unsigned ncopies,
                          stmt_vector_for_cost *cost_vec)
 {
   gimple *first_stmt = STMT_VINFO_STMT (stmt_info);
-  struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   unsigned int inside_cost = 0, prologue_cost = 0;
   bool grouped_access_p = STMT_VINFO_GROUPED_ACCESS (stmt_info);
 

@@ -1198,16 +1192,10 @@ vect_model_load_cost (stmt_vec_info stmt_info, unsigned ncopies,
               + assumed_nunits - 1) / assumed_nunits);
     }
 
-  /* ??? Need to transition load permutation (and load cost) handling
-     from vect_analyze_slp_cost_1 to here.  */
-
   /* Grouped loads read all elements in the group at once,
      so we want the DR for the first statement.  */
   if (!slp_node && grouped_access_p)
-    {
-      first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
-      dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
-    }
+    first_stmt = DR_GROUP_FIRST_ELEMENT (stmt_info);
 
   /* True if we should include any once-per-group costs as well as
      the cost of the statement itself.  For SLP we only get called
@@ -1246,7 +1234,7 @@ vect_model_load_cost (stmt_vec_info stmt_info, unsigned ncopies,
                               scalar_load, stmt_info, 0, vect_body);
     }
   else
-    vect_get_load_cost (dr, ncopies, first_stmt_p,
+    vect_get_load_cost (stmt_info, ncopies, first_stmt_p,
                        &inside_cost, &prologue_cost,
                        cost_vec, cost_vec, true);
   if (memory_access_type == VMAT_ELEMENTWISE

@@ -1263,16 +1251,15 @@ vect_model_load_cost (stmt_vec_info stmt_info, unsigned ncopies,
 
 /* Calculate cost of DR's memory access.  */
 void
-vect_get_load_cost (struct data_reference *dr, int ncopies,
+vect_get_load_cost (stmt_vec_info stmt_info, int ncopies,
                    bool add_realign_cost, unsigned int *inside_cost,
                    unsigned int *prologue_cost,
                    stmt_vector_for_cost *prologue_cost_vec,
                    stmt_vector_for_cost *body_cost_vec,
                    bool record_prologue_costs)
 {
+  data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
   int alignment_support_scheme = vect_supportable_dr_alignment (dr, false);
-  gimple *stmt = DR_STMT (dr);
-  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
 
   switch (alignment_support_scheme)
     {
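With the new signatures the cost routines recover the data reference themselves via STMT_VINFO_DATA_REF, so callers simply hand over the stmt_info they already hold. A hedged sketch of the resulting call-site shape (ncopies, the cost counters and the cost vectors are assumed to be in scope as in the surrounding functions):

  /* Old: vect_get_store_cost (dr, ncopies, &inside_cost, cost_vec);  */
  vect_get_store_cost (stmt_info, ncopies, &inside_cost, cost_vec);

  /* Old: vect_get_load_cost (dr, ...); the DR argument is gone here too.  */
  vect_get_load_cost (stmt_info, ncopies, first_stmt_p,
                      &inside_cost, &prologue_cost, cost_vec, cost_vec, true);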
gcc/tree-vectorizer.h
@@ -1305,6 +1305,22 @@ vect_dr_behavior (data_reference *dr)
   return &STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info);
 }
 
+/* Return the stmt DR is in.  For a DR_STMT that has been replaced by
+   a pattern this returns the corresponding pattern stmt.  Otherwise
+   DR_STMT is returned.  */
+
+inline gimple *
+vect_dr_stmt (data_reference *dr)
+{
+  gimple *stmt = DR_STMT (dr);
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
+  if (STMT_VINFO_IN_PATTERN_P (stmt_info))
+    return STMT_VINFO_RELATED_STMT (stmt_info);
+  /* DR_STMT should never refer to a stmt in a pattern replacement.  */
+  gcc_checking_assert (!STMT_VINFO_RELATED_STMT (stmt_info));
+  return stmt;
+}
+
 /* Return true if the vect cost model is unlimited.  */
 static inline bool
 unlimited_cost_model (loop_p loop)
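The helper added above is the core of the change. A usage sketch (hypothetical driver code, assuming a data_reference *dr whose defining statement may have been replaced by a pattern):

  /* Resolve the statement to operate on: the pattern stmt if the
     original was replaced, otherwise DR_STMT itself.  */
  gimple *stmt = vect_dr_stmt (dr);
  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);

  /* Invariant enforced by the helper's gcc_checking_assert: DR_STMT
     never points into a pattern replacement, only at an original stmt.  */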
@@ -1473,11 +1489,11 @@ extern bool vect_analyze_stmt (gimple *, bool *, slp_tree, slp_instance,
 extern bool vectorizable_condition (gimple *, gimple_stmt_iterator *,
                                    gimple **, tree, int, slp_tree,
                                    stmt_vector_for_cost *);
-extern void vect_get_load_cost (struct data_reference *, int, bool,
+extern void vect_get_load_cost (stmt_vec_info, int, bool,
                                unsigned int *, unsigned int *,
                                stmt_vector_for_cost *,
                                stmt_vector_for_cost *, bool);
-extern void vect_get_store_cost (struct data_reference *, int,
+extern void vect_get_store_cost (stmt_vec_info, int,
                                 unsigned int *, stmt_vector_for_cost *);
 extern bool vect_supportable_shift (enum tree_code, tree);
 extern tree vect_gen_perm_mask_any (tree, const vec_perm_indices &);