diff --git a/gcc/testsuite/gcc.dg/torture/pr112991.c b/gcc/testsuite/gcc.dg/torture/pr112991.c
new file mode 100644
index 00000000000..aace9854599
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr112991.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+
+typedef struct {
+  unsigned links[2];
+} RMF_unit;
+long RMF_recurseListsBound_count;
+int RMF_recurseListsBound_tbl, RMF_recurseListsBound_list_head_1;
+unsigned RMF_recurseListsBound_list_head_0;
+void RMF_recurseListsBound() {
+  int list_count = RMF_recurseListsBound_list_head_1;
+  long link = RMF_recurseListsBound_list_head_0;
+  for (; RMF_recurseListsBound_count;) {
+    long next_link =
+        ((RMF_unit *)&RMF_recurseListsBound_tbl)[link >> 2].links[0];
+    if (link)
+      --RMF_recurseListsBound_count;
+    link = next_link;
+  }
+  while (list_count)
+    ;
+}
diff --git a/gcc/tree-if-conv.cc b/gcc/tree-if-conv.cc
index f9fd0149937..e169413bb44 100644
--- a/gcc/tree-if-conv.cc
+++ b/gcc/tree-if-conv.cc
@@ -3734,7 +3734,7 @@ tree_if_conversion (class loop *loop, vec *preds)
   auto_vec reads_to_lower;
   auto_vec writes_to_lower;
   bitmap exit_bbs;
-  edge pe, e;
+  edge pe;
   auto_vec refs;
   bool loop_versioned;
 
@@ -3891,27 +3891,21 @@ tree_if_conversion (class loop *loop, vec *preds)
       combine_blocks (loop, loop_versioned);
     }
 
-  /* Perform local CSE, this esp. helps the vectorizer analysis if loads
-     and stores are involved.  CSE only the loop body, not the entry
-     PHIs, those are to be kept in sync with the non-if-converted copy.
-     Do this by adding a fake entry edge - we do want to include the
-     latch as otherwise copies on a reduction path cannot be propagated out.
-     ???  We'll still keep dead stores though.  */
-  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), loop->header, EDGE_FAKE);
-  exit_bbs = BITMAP_ALLOC (NULL);
-  for (edge exit : get_loop_exit_edges (loop))
-    bitmap_set_bit (exit_bbs, exit->dest->index);
-
   std::pair *name_pair;
   unsigned ssa_names_idx;
   FOR_EACH_VEC_ELT (redundant_ssa_names, ssa_names_idx, name_pair)
     replace_uses_by (name_pair->first, name_pair->second);
   redundant_ssa_names.release ();
 
-  todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
-
-  /* Remove the fake edge again.  */
-  remove_edge (e);
+  /* Perform local CSE, this esp. helps the vectorizer analysis if loads
+     and stores are involved.  CSE only the loop body, not the entry
+     PHIs, those are to be kept in sync with the non-if-converted copy.
+     ???  We'll still keep dead stores though.  */
+  exit_bbs = BITMAP_ALLOC (NULL);
+  for (edge exit : get_loop_exit_edges (loop))
+    bitmap_set_bit (exit_bbs, exit->dest->index);
+  todo |= do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs,
+                     false, true, true);
 
   /* Delete dead predicate computations.  */
   ifcvt_local_dce (loop);
diff --git a/gcc/tree-ssa-sccvn.cc b/gcc/tree-ssa-sccvn.cc
index a178b768459..4d3088643c4 100644
--- a/gcc/tree-ssa-sccvn.cc
+++ b/gcc/tree-ssa-sccvn.cc
@@ -7584,12 +7584,13 @@ eliminate_with_rpo_vn (bitmap inserted_exprs)
 
 static unsigned
 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
-             bool iterate, bool eliminate, vn_lookup_kind kind);
+             bool iterate, bool eliminate, bool skip_entry_phis,
+             vn_lookup_kind kind);
 
 void
 run_rpo_vn (vn_lookup_kind kind)
 {
-  do_rpo_vn_1 (cfun, NULL, NULL, true, false, kind);
+  do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);
 
   /* ???  Prune requirement of these.  */
   constant_to_value_id = new hash_table (23);
@@ -8290,11 +8291,13 @@ do_unwind (unwind_state *to, rpo_elim &avail)
 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
    If ITERATE is true then treat backedges optimistically as not
    executed and iterate.  If ELIMINATE is true then perform
-   elimination, otherwise leave that to the caller.  */
+   elimination, otherwise leave that to the caller.  If SKIP_ENTRY_PHIS
+   is true then force PHI nodes in ENTRY->dest to VARYING.  */
 
 static unsigned
 do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
-             bool iterate, bool eliminate, vn_lookup_kind kind)
+             bool iterate, bool eliminate, bool skip_entry_phis,
+             vn_lookup_kind kind)
 {
   unsigned todo = 0;
   default_vn_walk_kind = kind;
@@ -8335,10 +8338,10 @@ do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
     if (e != entry && !(e->flags & EDGE_DFS_BACK))
       break;
-  bool skip_entry_phis = e != NULL;
-  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
+  if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "Region does not contain all edges into "
             "the entry block, skipping its PHIs.\n");
+  skip_entry_phis |= e != NULL;
 
   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
   for (int i = 0; i < n; ++i)
@@ -8715,14 +8718,17 @@ do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
    If ITERATE is true then treat backedges optimistically as not
    executed and iterate.  If ELIMINATE is true then perform
    elimination, otherwise leave that to the caller.
+   If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
    KIND specifies the amount of work done for handling memory operations.  */
 
 unsigned
 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
-           bool iterate, bool eliminate, vn_lookup_kind kind)
+           bool iterate, bool eliminate, bool skip_entry_phis,
+           vn_lookup_kind kind)
 {
   auto_timevar tv (TV_TREE_RPO_VN);
-  unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate, kind);
+  unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
+                               skip_entry_phis, kind);
   free_rpo_vn ();
   return todo;
 }
@@ -8778,7 +8784,7 @@ pass_fre::execute (function *fun)
   if (iterate_p)
     loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
 
-  todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, VN_WALKREWRITE);
+  todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false, VN_WALKREWRITE);
   free_rpo_vn ();
 
   if (iterate_p)
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index 98d70e0ffe0..5e370b85213 100644
--- a/gcc/tree-ssa-sccvn.h
+++ b/gcc/tree-ssa-sccvn.h
@@ -298,6 +298,7 @@ tree vn_nary_simplify (vn_nary_op_t);
 unsigned do_rpo_vn (function *, edge, bitmap,
                     /* iterate */ bool = false,
                     /* eliminate */ bool = true,
+                    /* skip_entry_phis */ bool = false,
                     vn_lookup_kind = VN_WALKREWRITE);
 
 /* Private interface for PRE.  */
diff --git a/gcc/tree-ssa-uninit.cc b/gcc/tree-ssa-uninit.cc
index 9a7c7d12dd8..8d169c33729 100644
--- a/gcc/tree-ssa-uninit.cc
+++ b/gcc/tree-ssa-uninit.cc
@@ -1500,7 +1500,7 @@ execute_early_warn_uninitialized (struct function *fun)
      elimination to compute edge reachability.  Don't bother when
      we only warn for unconditionally executed code though.  */
   if (!optimize)
-    do_rpo_vn (fun, NULL, NULL, false, false, VN_NOWALK);
+    do_rpo_vn (fun, NULL, NULL, false, false, false, VN_NOWALK);
   else
     set_all_edges_as_executable (fun);